Merge remote-tracking branch 'upstream/main' into java-development
johnoliver committed May 29, 2024
2 parents 98c0341 + 5d25f6a commit 0cb826a
Showing 1,059 changed files with 38,735 additions and 17,190 deletions.
7 changes: 6 additions & 1 deletion .editorconfig
@@ -158,13 +158,18 @@ dotnet_diagnostic.CA1032.severity = none # We're using RCS1194 which seems to co
dotnet_diagnostic.CA1034.severity = none # Do not nest type. Alternatively, change its accessibility so that it is not externally visible
dotnet_diagnostic.CA1062.severity = none # Disable null check, C# already does it for us
dotnet_diagnostic.CA1303.severity = none # Do not pass literals as localized parameters
dotnet_diagnostic.CA1305.severity = none # Operation could vary based on current user's locale settings
dotnet_diagnostic.CA1307.severity = none # Operation has an overload that takes a StringComparison
dotnet_diagnostic.CA1508.severity = none # Avoid dead conditional code. Too many false positives.
dotnet_diagnostic.CA1510.severity = none
dotnet_diagnostic.CA1510.severity = none # ArgumentNullException.Throw
dotnet_diagnostic.CA1512.severity = none # ArgumentOutOfRangeException.Throw
dotnet_diagnostic.CA1515.severity = none # Making public types from exes internal
dotnet_diagnostic.CA1805.severity = none # Member is explicitly initialized to its default value
dotnet_diagnostic.CA1822.severity = none # Member does not access instance data and can be marked as static
dotnet_diagnostic.CA1848.severity = none # For improved performance, use the LoggerMessage delegates
dotnet_diagnostic.CA1849.severity = none # Use async equivalent; analyzer is currently noisy
dotnet_diagnostic.CA1865.severity = none # StartsWith(char)
dotnet_diagnostic.CA1867.severity = none # EndsWith(char)
dotnet_diagnostic.CA2007.severity = none # Do not directly await a Task
dotnet_diagnostic.CA2225.severity = none # Operator overloads have named alternates
dotnet_diagnostic.CA2227.severity = none # Change to be read-only by removing the property setter
3 changes: 3 additions & 0 deletions .github/_typos.toml
@@ -14,6 +14,7 @@ extend-exclude = [
"vocab.bpe",
"CodeTokenizerTests.cs",
"test_code_tokenizer.py",
"*response.json",
]

[default.extend-words]
@@ -25,6 +26,8 @@ HD = "HD" # Test header value
EOF = "EOF" # End of File
ans = "ans" # Short for answers
arange = "arange" # Method in Python numpy package
prompty = "prompty" # prompty is a format name.
ist = "ist" # German language

[default.extend-identifiers]
ags = "ags" # Azure Graph Service
32 changes: 16 additions & 16 deletions .github/workflows/dotnet-build-and-test.yml
@@ -52,40 +52,40 @@ jobs:
fail-fast: false
matrix:
include:
- { dotnet: "8.0-jammy", os: "ubuntu", configuration: Release }
- {
dotnet: "8.0",
os: "windows",
configuration: Debug,
os: "ubuntu-latest",
configuration: Release,
integration-tests: true,
}
- { dotnet: "8.0", os: "windows", configuration: Release }

runs-on: ubuntu-latest
container:
image: mcr.microsoft.com/dotnet/sdk:${{ matrix.dotnet }}
env:
NUGET_CERT_REVOCATION_MODE: offline
GITHUB_ACTIONS: "true"
- { dotnet: "8.0", os: "windows-latest", configuration: Debug }
- { dotnet: "8.0", os: "windows-latest", configuration: Release }

runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v4

- name: Setup dotnet ${{ matrix.dotnet }}
uses: actions/setup-dotnet@v3
with:
dotnet-version: ${{ matrix.dotnet }}
- name: Build dotnet solutions
shell: bash
run: |
export SOLUTIONS=$(find ./dotnet/ -type f -name "*.sln" | tr '\n' ' ')
for solution in $SOLUTIONS; do
dotnet build -c ${{ matrix.configuration }} /warnaserror $solution
dotnet build $solution -c ${{ matrix.configuration }} --warnaserror
done
- name: Run Unit Tests
shell: bash
run: |
export UT_PROJECTS=$(find ./dotnet -type f -name "*.UnitTests.csproj" | grep -v -E "(Experimental.Orchestration.Flow.UnitTests.csproj|Experimental.Assistants.UnitTests.csproj)" | tr '\n' ' ')
for project in $UT_PROJECTS; do
dotnet test -c ${{ matrix.configuration }} $project --no-build -v Normal --logger trx --collect:"XPlat Code Coverage" --results-directory:"TestResults/Coverage/"
dotnet test -c ${{ matrix.configuration }} $project --no-build -v Normal --logger trx --collect:"XPlat Code Coverage" --results-directory:"TestResults/Coverage/" -- DataCollectionRunSettings.DataCollectors.DataCollector.Configuration.ExcludeByAttribute=ObsoleteAttribute,GeneratedCodeAttribute,CompilerGeneratedAttribute,ExcludeFromCodeCoverageAttribute
done
- name: Run Integration Tests
shell: bash
if: github.event_name != 'pull_request' && matrix.integration-tests
run: |
export INTEGRATION_TEST_PROJECTS=$(find ./dotnet -type f -name "*IntegrationTests.csproj" | grep -v "Experimental.Orchestration.Flow.IntegrationTests.csproj" | tr '\n' ' ')
@@ -98,9 +98,9 @@ jobs:
AzureOpenAI__DeploymentName: ${{ vars.AZUREOPENAI__DEPLOYMENTNAME }}
AzureOpenAIEmbeddings__DeploymentName: ${{ vars.AZUREOPENAIEMBEDDING__DEPLOYMENTNAME }}
AzureOpenAI__Endpoint: ${{ secrets.AZUREOPENAI__ENDPOINT }}
AzureOpenAIEmbeddings__Endpoint: ${{ secrets.AZUREOPENAI__ENDPOINT }}
AzureOpenAIEmbeddings__Endpoint: ${{ secrets.AZUREOPENAI_EASTUS__ENDPOINT }}
AzureOpenAI__ApiKey: ${{ secrets.AZUREOPENAI__APIKEY }}
AzureOpenAIEmbeddings__ApiKey: ${{ secrets.AZUREOPENAI__APIKEY }}
AzureOpenAIEmbeddings__ApiKey: ${{ secrets.AZUREOPENAI_EASTUS__APIKEY }}
Planners__AzureOpenAI__ApiKey: ${{ secrets.PLANNERS__AZUREOPENAI__APIKEY }}
Planners__AzureOpenAI__Endpoint: ${{ secrets.PLANNERS__AZUREOPENAI__ENDPOINT }}
Planners__AzureOpenAI__DeploymentName: ${{ vars.PLANNERS__AZUREOPENAI__DEPLOYMENTNAME }}
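The new matrix above tags one entry with `integration-tests: true` so later steps can be gated per matrix entry. A minimal sketch of that pattern, with invented workflow, job, and step names (not taken from this commit):

```yaml
# Sketch: gate a step on a per-matrix-entry flag; names and commands are placeholders.
name: sketch-matrix-flag
on: workflow_dispatch
jobs:
  example-build:
    strategy:
      fail-fast: false
      matrix:
        include:
          - { dotnet: "8.0", os: "ubuntu-latest", configuration: Release, integration-tests: true }
          - { dotnet: "8.0", os: "windows-latest", configuration: Debug }
    runs-on: ${{ matrix.os }}
    steps:
      - uses: actions/checkout@v4
      # Runs for every matrix entry.
      - name: Unit tests
        run: echo "unit tests on ${{ matrix.os }} (${{ matrix.configuration }})"
      # Runs only for entries that set integration-tests: true;
      # an unset flag evaluates as falsy in the expression.
      - name: Integration tests
        if: matrix.integration-tests
        run: echo "integration tests"
```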
70 changes: 30 additions & 40 deletions .github/workflows/python-integration-tests.yml
@@ -76,26 +76,21 @@ jobs:
env: # Set Azure credentials secret as an input
HNSWLIB_NO_NATIVE: 1
Python_Integration_Tests: Python_Integration_Tests
AzureOpenAI__Label: azure-text-davinci-003
AzureOpenAIEmbedding__Label: azure-text-embedding-ada-002
AzureOpenAI__DeploymentName: ${{ vars.AZUREOPENAI__DEPLOYMENTNAME }}
AzureOpenAI__Text__DeploymentName: ${{ vars.AZUREOPENAI__TEXT__DEPLOYMENTNAME }}
AzureOpenAIChat__DeploymentName: ${{ vars.AZUREOPENAI__CHAT__DEPLOYMENTNAME }}
AzureOpenAIEmbeddings__DeploymentName: ${{ vars.AZUREOPENAIEMBEDDINGS__DEPLOYMENTNAME2 }}
AzureOpenAIEmbeddings_EastUS__DeploymentName: ${{ vars.AZUREOPENAIEMBEDDINGS_EASTUS__DEPLOYMENTNAME}}
AzureOpenAI__Endpoint: ${{ secrets.AZUREOPENAI__ENDPOINT }}
AzureOpenAI_EastUS__Endpoint: ${{ secrets.AZUREOPENAI_EASTUS__ENDPOINT }}
AzureOpenAI_EastUS__ApiKey: ${{ secrets.AZUREOPENAI_EASTUS__APIKEY }}
AzureOpenAIEmbeddings__Endpoint: ${{ secrets.AZUREOPENAI__ENDPOINT }}
AzureOpenAI__ApiKey: ${{ secrets.AZUREOPENAI__APIKEY }}
AzureOpenAIEmbeddings__ApiKey: ${{ secrets.AZUREOPENAI__APIKEY }}
Bing__ApiKey: ${{ secrets.BING__APIKEY }}
OpenAI__ApiKey: ${{ secrets.OPENAI__APIKEY }}
Pinecone__ApiKey: ${{ secrets.PINECONE__APIKEY }}
Pinecone__Environment: ${{ secrets.PINECONE__ENVIRONMENT }}
Postgres__Connectionstr: ${{secrets.POSTGRES__CONNECTIONSTR}}
AZURE_COGNITIVE_SEARCH_ADMIN_KEY: ${{secrets.AZURE_COGNITIVE_SEARCH_ADMIN_KEY}}
AZURE_COGNITIVE_SEARCH_ENDPOINT: ${{secrets.AZURE_COGNITIVE_SEARCH_ENDPOINT}}
AZURE_OPENAI_EMBEDDING_DEPLOYMENT_NAME: ${{ vars.AZURE_OPENAI_EMBEDDING_DEPLOYMENT_NAME }} # azure-text-embedding-ada-002
AZURE_OPENAI_CHAT_DEPLOYMENT_NAME: ${{ vars.AZURE_OPENAI_CHAT_DEPLOYMENT_NAME }}
AZURE_OPENAI_TEXT_DEPLOYMENT_NAME: ${{ vars.AZURE_OPENAI_TEXT_DEPLOYMENT_NAME }}
AZURE_OPENAI_API_VERSION: ${{ vars.AZURE_OPENAI_API_VERSION }}
AZURE_OPENAI_ENDPOINT: ${{ secrets.AZURE_OPENAI_ENDPOINT }}
AZURE_OPENAI_API_KEY: ${{ secrets.AZURE_OPENAI_API_KEY }}
BING_API_KEY: ${{ secrets.BING_API_KEY }}
OPENAI_CHAT_MODEL_ID: ${{ vars.OPENAI_CHAT_MODEL_ID }}
OPENAI_TEXT_MODEL_ID: ${{ vars.OPENAI_TEXT_MODEL_ID }}
OPENAI_EMBEDDING_MODEL_ID: ${{ vars.OPENAI_EMBEDDING_MODEL_ID }}
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
PINECONE_API_KEY: ${{ secrets.PINECONE__APIKEY }}
POSTGRES_CONNECTION_STRING: ${{secrets.POSTGRES__CONNECTIONSTR}}
AZURE_AI_SEARCH_API_KEY: ${{secrets.AZURE_AI_SEARCH_API_KEY}}
AZURE_AI_SEARCH_ENDPOINT: ${{secrets.AZURE_AI_SEARCH_ENDPOINT}}
MONGODB_ATLAS_CONNECTION_STRING: ${{secrets.MONGODB_ATLAS_CONNECTION_STRING}}
run: |
if ${{ matrix.os == 'ubuntu-latest' }}; then
@@ -143,26 +138,21 @@ jobs:
env: # Set Azure credentials secret as an input
HNSWLIB_NO_NATIVE: 1
Python_Integration_Tests: Python_Integration_Tests
AzureOpenAI__Label: azure-text-davinci-003
AzureOpenAIEmbedding__Label: azure-text-embedding-ada-002
AzureOpenAI__DeploymentName: ${{ vars.AZUREOPENAI__DEPLOYMENTNAME }}
AzureOpenAI__Text__DeploymentName: ${{ vars.AZUREOPENAI__TEXT__DEPLOYMENTNAME }}
AzureOpenAIChat__DeploymentName: ${{ vars.AZUREOPENAI__CHAT__DEPLOYMENTNAME }}
AzureOpenAIEmbeddings__DeploymentName: ${{ vars.AZUREOPENAIEMBEDDINGS__DEPLOYMENTNAME2 }}
AzureOpenAIEmbeddings_EastUS__DeploymentName: ${{ vars.AZUREOPENAIEMBEDDINGS_EASTUS__DEPLOYMENTNAME}}
AzureOpenAI__Endpoint: ${{ secrets.AZUREOPENAI__ENDPOINT }}
AzureOpenAIEmbeddings__Endpoint: ${{ secrets.AZUREOPENAI__ENDPOINT }}
AzureOpenAI__ApiKey: ${{ secrets.AZUREOPENAI__APIKEY }}
AzureOpenAI_EastUS__Endpoint: ${{ secrets.AZUREOPENAI_EASTUS__ENDPOINT }}
AzureOpenAI_EastUS__ApiKey: ${{ secrets.AZUREOPENAI_EASTUS__APIKEY }}
AzureOpenAIEmbeddings__ApiKey: ${{ secrets.AZUREOPENAI__APIKEY }}
Bing__ApiKey: ${{ secrets.BING__APIKEY }}
OpenAI__ApiKey: ${{ secrets.OPENAI__APIKEY }}
Pinecone__ApiKey: ${{ secrets.PINECONE__APIKEY }}
Pinecone__Environment: ${{ secrets.PINECONE__ENVIRONMENT }}
Postgres__Connectionstr: ${{secrets.POSTGRES__CONNECTIONSTR}}
AZURE_COGNITIVE_SEARCH_ADMIN_KEY: ${{secrets.AZURE_COGNITIVE_SEARCH_ADMIN_KEY}}
AZURE_COGNITIVE_SEARCH_ENDPOINT: ${{secrets.AZURE_COGNITIVE_SEARCH_ENDPOINT}}
AZURE_OPENAI_EMBEDDING_DEPLOYMENT_NAME: ${{ vars.AZURE_OPENAI_EMBEDDING_DEPLOYMENT_NAME }} # azure-text-embedding-ada-002
AZURE_OPENAI_CHAT_DEPLOYMENT_NAME: ${{ vars.AZURE_OPENAI_CHAT_DEPLOYMENT_NAME }}
AZURE_OPENAI_TEXT_DEPLOYMENT_NAME: ${{ vars.AZURE_OPENAI_TEXT_DEPLOYMENT_NAME }}
AZURE_OPENAI_API_VERSION: ${{ vars.AZURE_OPENAI_API_VERSION }}
AZURE_OPENAI_ENDPOINT: ${{ secrets.AZURE_OPENAI_ENDPOINT }}
AZURE_OPENAI_API_KEY: ${{ secrets.AZURE_OPENAI_API_KEY }}
BING_API_KEY: ${{ secrets.BING_API_KEY }}
OPENAI_CHAT_MODEL_ID: ${{ vars.OPENAI_CHAT_MODEL_ID }}
OPENAI_TEXT_MODEL_ID: ${{ vars.OPENAI_TEXT_MODEL_ID }}
OPENAI_EMBEDDING_MODEL_ID: ${{ vars.OPENAI_EMBEDDING_MODEL_ID }}
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
PINECONE_API_KEY: ${{ secrets.PINECONE__APIKEY }}
POSTGRES_CONNECTION_STRING: ${{secrets.POSTGRES__CONNECTIONSTR}}
AZURE_AI_SEARCH_API_KEY: ${{secrets.AZURE_AI_SEARCH_API_KEY}}
AZURE_AI_SEARCH_ENDPOINT: ${{secrets.AZURE_AI_SEARCH_ENDPOINT}}
MONGODB_ATLAS_CONNECTION_STRING: ${{secrets.MONGODB_ATLAS_CONNECTION_STRING}}
run: |
if ${{ matrix.os == 'ubuntu-latest' }}; then
55 changes: 5 additions & 50 deletions .github/workflows/python-lint.yml
@@ -7,67 +7,22 @@ on:
- 'python/**'

jobs:
ruff:
pre-commit:
if: '!cancelled()'
strategy:
fail-fast: false
matrix:
python-version: ["3.10"]
runs-on: ubuntu-latest
timeout-minutes: 5
continue-on-error: true
steps:
- run: echo "/root/.local/bin" >> $GITHUB_PATH
- uses: actions/checkout@v4
- name: Install poetry
run: pipx install poetry
- uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
cache: "poetry"
- name: Install Semantic Kernel
run: cd python && poetry install --no-ansi
- name: Run ruff
run: cd python && poetry run ruff check .
black:
if: '!cancelled()'
strategy:
fail-fast: false
matrix:
python-version: ["3.10"]
runs-on: ubuntu-latest
timeout-minutes: 5
steps:
- run: echo "/root/.local/bin" >> $GITHUB_PATH
- uses: actions/checkout@v4
- name: Install poetry
run: pipx install poetry
- uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
cache: "poetry"
- name: Install Semantic Kernel
run: cd python && poetry install --no-ansi
- name: Run black
run: cd python && poetry run black --check .
mypy:
if: '!cancelled()'
strategy:
fail-fast: false
matrix:
python-version: ["3.10"]
runs-on: ubuntu-latest
timeout-minutes: 5
steps:
- run: echo "/root/.local/bin" >> $GITHUB_PATH
- uses: actions/checkout@v4
- name: Install poetry
run: pipx install poetry
- uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
cache: "poetry"
- name: Install Semantic Kernel
run: cd python && poetry install --no-ansi
- name: Run mypy
run: cd python && poetry run mypy -p semantic_kernel --config-file=mypy.ini

- name: Install dependencies
run: cd python && poetry install
- uses: pre-commit/[email protected]
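For context, this replaces the separate ruff, black, and mypy jobs with a single job that defers to the repository's pre-commit hooks. A minimal, self-contained sketch of that style of lint job; the pinned action version and the `--all-files` argument are assumptions for illustration, not copied from this commit:

```yaml
# Sketch: run all configured pre-commit hooks as one CI job (versions illustrative).
name: sketch-pre-commit-lint
on: workflow_dispatch
jobs:
  pre-commit:
    runs-on: ubuntu-latest
    timeout-minutes: 5
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        with:
          python-version: "3.10"
      # Runs every hook from .pre-commit-config.yaml against the whole tree,
      # keeping CI results consistent with a local `pre-commit run --all-files`.
      - uses: pre-commit/[email protected]  # assumed pin
        with:
          extra_args: --all-files
```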
6 changes: 5 additions & 1 deletion .github/workflows/python-test-coverage.yml
@@ -10,7 +10,6 @@ jobs:
python-tests-coverage:
name: Create Test Coverage Messages
runs-on: ${{ matrix.os }}
continue-on-error: true
permissions:
pull-requests: write
contents: read
@@ -21,14 +20,17 @@
os: [ubuntu-latest]
steps:
- name: Wait for unit tests to succeed
continue-on-error: true
uses: lewagon/[email protected]
with:
ref: ${{ github.event.pull_request.head.sha }}
check-name: 'Python Unit Tests (${{ matrix.python-version}}, ${{ matrix.os }})'
repo-token: ${{ secrets.GH_ACTIONS_PR_WRITE }}
wait-interval: 10
allowed-conclusions: success
- uses: actions/checkout@v4
- name: Download coverage
continue-on-error: true
uses: dawidd6/action-download-artifact@v3
with:
name: python-coverage-${{ matrix.os }}-${{ matrix.python-version }}.txt
@@ -37,6 +39,7 @@
search_artifacts: true
if_no_artifact_found: warn
- name: Download pytest
continue-on-error: true
uses: dawidd6/action-download-artifact@v3
with:
name: pytest-${{ matrix.os }}-${{ matrix.python-version }}.xml
@@ -45,6 +48,7 @@
search_artifacts: true
if_no_artifact_found: warn
- name: Pytest coverage comment
continue-on-error: true
id: coverageComment
uses: MishaKav/pytest-coverage-comment@main
with:
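The coverage workflow also moves `continue-on-error` from the job level down to the individual wait/download/comment steps, which limits the tolerated failures to just those steps. A short sketch of the step-level form, with placeholder step bodies:

```yaml
# Sketch: step-level continue-on-error; step contents are placeholders.
name: sketch-step-tolerance
on: workflow_dispatch
jobs:
  coverage-comment:
    runs-on: ubuntu-latest
    steps:
      # A failure here is tolerated: the step is marked as failed but the
      # job keeps running and can still succeed.
      - name: Download coverage artifact (may be missing)
        continue-on-error: true
        run: echo "download artifact here"
      # A failure here still fails the job, which a job-level
      # continue-on-error would instead let the overall workflow ignore.
      - name: Post coverage comment
        run: echo "post comment here"
```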
32 changes: 26 additions & 6 deletions .pre-commit-config.yaml
@@ -7,23 +7,37 @@ repos:
- id: sync_with_poetry
args: [--config=.pre-commit-config.yaml, --db=python/.conf/packages_list.json, python/poetry.lock]
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.0.1
rev: v4.6.0
hooks:
- id: check-toml
files: \.toml$
- id: check-yaml
files: \.yaml$
- id: check-json
files: \.json$
exclude: ^python\/\.vscode\/.*
- id: end-of-file-fixer
files: \.py$
- id: mixed-line-ending
files: \.py$
- repo: https://github.com/psf/black
rev: 24.4.0
- id: debug-statements
files: ^python\/semantic_kernel\/.*\.py$
- id: check-ast
name: Check Valid Python Samples
types: ["python"]
- repo: https://github.com/nbQA-dev/nbQA
rev: 1.8.5
hooks:
- id: black
files: \.py$
- id: nbqa-check-ast
name: Check Valid Python Notebooks
types: ["jupyter"]
- repo: https://github.com/asottile/pyupgrade
rev: v3.15.2
hooks:
- id: pyupgrade
args: [--py310-plus]
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.4.1
rev: v0.4.5
hooks:
- id: ruff
args: [ --fix, --exit-non-zero-on-fix ]
@@ -36,3 +50,9 @@ repos:
language: system
types: [python]
pass_filenames: false
- repo: https://github.com/PyCQA/bandit
rev: 1.7.8
hooks:
- id: bandit
args: ["-c", "python/pyproject.toml"]
additional_dependencies: [ "bandit[toml]" ]
1 change: 1 addition & 0 deletions .vscode/settings.json
@@ -72,6 +72,7 @@
},
"cSpell.words": [
"Partitioner",
"Prompty",
"SKEXP"
],
"[java]": {