diff --git a/azure_functions_worker/protos/.gitignore b/azure_functions_worker/protos/.gitignore index f43e6c214..49d7060ef 100644 --- a/azure_functions_worker/protos/.gitignore +++ b/azure_functions_worker/protos/.gitignore @@ -1,3 +1,2 @@ -/_src *_pb2.py *_pb2_grpc.py diff --git a/eng/templates/jobs/ci-emulator-tests.yml b/eng/templates/jobs/ci-emulator-tests.yml index abc84f394..968585017 100644 --- a/eng/templates/jobs/ci-emulator-tests.yml +++ b/eng/templates/jobs/ci-emulator-tests.yml @@ -21,6 +21,8 @@ jobs: PYTHON_VERSION: '3.11' Python312: PYTHON_VERSION: '3.12' + Python313: + PYTHON_VERSION: '3.13' steps: - task: UsePythonVersion@0 inputs: diff --git a/eng/templates/official/jobs/build-artifacts.yml b/eng/templates/official/jobs/build-artifacts.yml index 631115b4c..bc0c5de1a 100644 --- a/eng/templates/official/jobs/build-artifacts.yml +++ b/eng/templates/official/jobs/build-artifacts.yml @@ -24,6 +24,9 @@ jobs: Python312V4: pythonVersion: '3.12' workerPath: 'python/prodV4/worker.py' + Python313V4: + pythonVersion: '3.13' + workerPath: 'python/proxyV4/worker.py' templateContext: outputParentDirectory: $(Build.ArtifactStagingDirectory) outputs: @@ -62,6 +65,9 @@ jobs: Python312V4: pythonVersion: '3.12' workerPath: 'python/prodV4/worker.py' + Python313V4: + pythonVersion: '3.13' + workerPath: 'python/proxyV4/worker.py' templateContext: outputParentDirectory: $(Build.ArtifactStagingDirectory) outputs: @@ -100,6 +106,9 @@ jobs: Python312V4: pythonVersion: '3.12' workerPath: 'python/prodV4/worker.py' + Python313V4: + pythonVersion: '3.13' + workerPath: 'python/proxyV4/worker.py' templateContext: outputParentDirectory: $(Build.ArtifactStagingDirectory) outputs: @@ -137,6 +146,9 @@ jobs: Python312V4: pythonVersion: '3.12' workerPath: 'python/prodV4/worker.py' + Python313V4: + pythonVersion: '3.13' + workerPath: 'python/proxyV4/worker.py' templateContext: outputParentDirectory: $(Build.ArtifactStagingDirectory) outputs: @@ -168,6 +180,9 @@ jobs: Python312V4: pythonVersion: '3.12' workerPath: 'python/prodV4/worker.py' + Python313V4: + pythonVersion: '3.13' + workerPath: 'python/proxyV4/worker.py' templateContext: outputParentDirectory: $(Build.ArtifactStagingDirectory) outputs: diff --git a/eng/templates/official/jobs/ci-e2e-tests.yml b/eng/templates/official/jobs/ci-e2e-tests.yml index edd898f65..33148bf94 100644 --- a/eng/templates/official/jobs/ci-e2e-tests.yml +++ b/eng/templates/official/jobs/ci-e2e-tests.yml @@ -63,6 +63,15 @@ jobs: SQL_CONNECTION: $(LinuxSqlConnectionString312) EVENTGRID_URI: $(LinuxEventGridTopicUriString312) EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString312) + Python313: + PYTHON_VERSION: '3.13' + STORAGE_CONNECTION: $(LinuxStorageConnectionString312) + COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString312) + EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString312) + SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString312) + SQL_CONNECTION: $(LinuxSqlConnectionString312) + EVENTGRID_URI: $(LinuxEventGridTopicUriString312) + EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString312) steps: - task: UsePythonVersion@0 inputs: diff --git a/pack/Microsoft.Azure.Functions.V4.PythonWorker.nuspec b/pack/Microsoft.Azure.Functions.V4.PythonWorker.nuspec index b3ce47d0c..98b0c832a 100644 --- a/pack/Microsoft.Azure.Functions.V4.PythonWorker.nuspec +++ b/pack/Microsoft.Azure.Functions.V4.PythonWorker.nuspec @@ -38,6 +38,11 @@ + + + + + diff --git a/pack/scripts/mac_arm64_deps.sh b/pack/scripts/mac_arm64_deps.sh index 2d70bafad..9c08cce46 100644 --- 
a/pack/scripts/mac_arm64_deps.sh +++ b/pack/scripts/mac_arm64_deps.sh @@ -13,4 +13,10 @@ python -m invoke -c test_setup build-protos cd .. cp .artifactignore "$BUILD_SOURCESDIRECTORY/deps" -cp -r azure_functions_worker/protos "$BUILD_SOURCESDIRECTORY/deps/azure_functions_worker" \ No newline at end of file + +version_minor=$(echo $1 | cut -d '.' -f 2) +if [[ $version_minor -lt 13 ]]; then + cp -r azure_functions_worker/protos "$BUILD_SOURCESDIRECTORY/deps/azure_functions_worker" +else + cp -r proxy_worker/protos "$BUILD_SOURCESDIRECTORY/deps/proxy_worker" +fi \ No newline at end of file diff --git a/pack/scripts/nix_deps.sh b/pack/scripts/nix_deps.sh index 2d70bafad..9c08cce46 100644 --- a/pack/scripts/nix_deps.sh +++ b/pack/scripts/nix_deps.sh @@ -13,4 +13,10 @@ python -m invoke -c test_setup build-protos cd .. cp .artifactignore "$BUILD_SOURCESDIRECTORY/deps" -cp -r azure_functions_worker/protos "$BUILD_SOURCESDIRECTORY/deps/azure_functions_worker" \ No newline at end of file + +version_minor=$(echo $1 | cut -d '.' -f 2) +if [[ $version_minor -lt 13 ]]; then + cp -r azure_functions_worker/protos "$BUILD_SOURCESDIRECTORY/deps/azure_functions_worker" +else + cp -r proxy_worker/protos "$BUILD_SOURCESDIRECTORY/deps/proxy_worker" +fi \ No newline at end of file diff --git a/pack/scripts/win_deps.ps1 b/pack/scripts/win_deps.ps1 index a7be372e7..b4c95203d 100644 --- a/pack/scripts/win_deps.ps1 +++ b/pack/scripts/win_deps.ps1 @@ -1,3 +1,9 @@ +param ( + [string]$pythonVersion +) +$versionParts = $pythonVersion -split '\.' # Splitting by dot +$versionMinor = [int]$versionParts[1] + python -m venv .env .env\Scripts\Activate.ps1 python -m pip install --upgrade pip @@ -5,7 +11,6 @@ python -m pip install --upgrade pip python -m pip install . $depsPath = Join-Path -Path $env:BUILD_SOURCESDIRECTORY -ChildPath "deps" -$protosPath = Join-Path -Path $depsPath -ChildPath "azure_functions_worker/protos" python -m pip install . azure-functions --no-compile --target $depsPath.ToString() @@ -15,4 +20,11 @@ python -m invoke -c test_setup build-protos cd .. Copy-Item -Path ".artifactignore" -Destination $depsPath.ToString() -Copy-Item -Path "azure_functions_worker/protos/*" -Destination $protosPath.ToString() -Recurse -Force + +if ($versionMinor -lt 13) { + $protosPath = Join-Path -Path $depsPath -ChildPath "azure_functions_worker/protos" + Copy-Item -Path "azure_functions_worker/protos/*" -Destination $protosPath.ToString() -Recurse -Force +} else { + $protosPath = Join-Path -Path $depsPath -ChildPath "proxy_worker/protos" + Copy-Item -Path "proxy_worker/protos/*" -Destination $protosPath.ToString() -Recurse -Force +} diff --git a/pack/templates/macos_64_env_gen.yml b/pack/templates/macos_64_env_gen.yml index 90a3578d7..75f33bc5f 100644 --- a/pack/templates/macos_64_env_gen.yml +++ b/pack/templates/macos_64_env_gen.yml @@ -8,10 +8,19 @@ steps: inputs: versionSpec: ${{ parameters.pythonVersion }} addToPath: true +- bash: | + major=$(echo $(pythonVersion) | cut -d. -f1) + minor=$(echo $(pythonVersion) | cut -d. 
-f2) + echo "##vso[task.setvariable variable=pythonMajor]$major" + echo "##vso[task.setvariable variable=pythonMinor]$minor" + echo $pythonMinor + displayName: 'Parse pythonVersion' - task: ShellScript@2 inputs: disableAutoCwd: true scriptPath: 'pack/scripts/mac_arm64_deps.sh' + args: '${{ parameters.pythonVersion }}' + displayName: 'Install Dependencies' - bash: | pip install pip-audit pip-audit -r requirements.txt @@ -41,4 +50,30 @@ steps: !pkg_resources/** !*.dist-info/** !werkzeug/debug/shared/debugger.js + !proxy_worker/** + targetFolder: '$(Build.ArtifactStagingDirectory)' + condition: in(variables['pythonMinor'], '7', '8', '9', '10', '11', '12') + displayName: 'Copy azure_functions_worker files' +- task: CopyFiles@2 + inputs: + sourceFolder: '$(Build.SourcesDirectory)/deps' + contents: | + ** + !grpc_tools/**/* + !grpcio_tools*/* + !build/** + !docs/** + !pack/** + !python/** + !tests/** + !setuptools*/** + !_distutils_hack/** + !distutils-precedence.pth + !pkg_resources/** + !*.dist-info/** + !werkzeug/debug/shared/debugger.js + !azure_functions_worker/** + !dateutil/** targetFolder: '$(Build.ArtifactStagingDirectory)' + condition: in(variables['pythonMinor'], '13') + displayName: 'Copy proxy_worker files' diff --git a/pack/templates/nix_env_gen.yml b/pack/templates/nix_env_gen.yml index ae3cf4330..db3820153 100644 --- a/pack/templates/nix_env_gen.yml +++ b/pack/templates/nix_env_gen.yml @@ -8,10 +8,19 @@ steps: inputs: versionSpec: ${{ parameters.pythonVersion }} addToPath: true +- bash: | + major=$(echo $(pythonVersion) | cut -d. -f1) + minor=$(echo $(pythonVersion) | cut -d. -f2) + echo "##vso[task.setvariable variable=pythonMajor]$major" + echo "##vso[task.setvariable variable=pythonMinor]$minor" + echo $pythonMinor + displayName: 'Parse pythonVersion' - task: ShellScript@2 inputs: disableAutoCwd: true scriptPath: 'pack/scripts/nix_deps.sh' + args: '${{ parameters.pythonVersion }}' + displayName: 'Install Dependencies' - bash: | pip install pip-audit pip-audit -r requirements.txt @@ -41,4 +50,30 @@ steps: !pkg_resources/** !*.dist-info/** !werkzeug/debug/shared/debugger.js + !proxy_worker/** + targetFolder: '$(Build.ArtifactStagingDirectory)' + condition: in(variables['pythonMinor'], '7', '8', '9', '10', '11', '12') + displayName: 'Copy azure_functions_worker files' +- task: CopyFiles@2 + inputs: + sourceFolder: '$(Build.SourcesDirectory)/deps' + contents: | + ** + !grpc_tools/**/* + !grpcio_tools*/* + !build/** + !docs/** + !pack/** + !python/** + !tests/** + !setuptools*/** + !_distutils_hack/** + !distutils-precedence.pth + !pkg_resources/** + !*.dist-info/** + !werkzeug/debug/shared/debugger.js + !dateutil/** + !azure_functions_worker/** targetFolder: '$(Build.ArtifactStagingDirectory)' + condition: in(variables['pythonMinor'], '13') + displayName: 'Copy proxy_worker files' diff --git a/pack/templates/win_env_gen.yml b/pack/templates/win_env_gen.yml index 2eee3411a..b85bf9f89 100644 --- a/pack/templates/win_env_gen.yml +++ b/pack/templates/win_env_gen.yml @@ -9,9 +9,17 @@ steps: versionSpec: ${{ parameters.pythonVersion }} architecture: ${{ parameters.architecture }} addToPath: true +- bash: | + major=$(echo $(pythonVersion) | cut -d. -f1) + minor=$(echo $(pythonVersion) | cut -d. 
-f2) + echo "##vso[task.setvariable variable=pythonMajor]$major" + echo "##vso[task.setvariable variable=pythonMinor]$minor" + echo $pythonMinor + displayName: 'Parse pythonVersion' - task: PowerShell@2 inputs: filePath: 'pack\scripts\win_deps.ps1' + arguments: '${{ parameters.pythonVersion }}' - bash: | pip install pip-audit pip-audit -r requirements.txt @@ -41,4 +49,30 @@ steps: !pkg_resources\** !*.dist-info\** !werkzeug\debug\shared\debugger.js + !proxy_worker\** + targetFolder: '$(Build.ArtifactStagingDirectory)' + condition: in(variables['pythonMinor'], '7', '8', '9', '10', '11', '12') + displayName: 'Copy azure_functions_worker files' +- task: CopyFiles@2 + inputs: + sourceFolder: '$(Build.SourcesDirectory)\deps' + contents: | + ** + !grpc_tools\**\* + !grpcio_tools*\* + !build\** + !docs\** + !pack\** + !python\** + !tests\** + !setuptools*\** + !_distutils_hack\** + !distutils-precedence.pth + !pkg_resources\** + !*.dist-info\** + !werkzeug\debug\shared\debugger.js + !dateutil\** + !azure_functions_worker\** targetFolder: '$(Build.ArtifactStagingDirectory)' + condition: in(variables['pythonMinor'], '13') + displayName: 'Copy proxy_worker files' diff --git a/proxy_worker/protos/.gitignore b/proxy_worker/protos/.gitignore index f43e6c214..49d7060ef 100644 --- a/proxy_worker/protos/.gitignore +++ b/proxy_worker/protos/.gitignore @@ -1,3 +1,2 @@ -/_src *_pb2.py *_pb2_grpc.py diff --git a/proxy_worker/protos/_src/.gitignore b/proxy_worker/protos/_src/.gitignore new file mode 100644 index 000000000..940794e60 --- /dev/null +++ b/proxy_worker/protos/_src/.gitignore @@ -0,0 +1,288 @@ +## Ignore Visual Studio temporary files, build results, and +## files generated by popular Visual Studio add-ons. +## +## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore + +# User-specific files +*.suo +*.user +*.userosscache +*.sln.docstates + +# User-specific files (MonoDevelop/Xamarin Studio) +*.userprefs + +# Build results +[Dd]ebug/ +[Dd]ebugPublic/ +[Rr]elease/ +[Rr]eleases/ +x64/ +x86/ +bld/ +[Bb]in/ +[Oo]bj/ +[Ll]og/ + +# Visual Studio 2015 cache/options directory +.vs/ +# Uncomment if you have tasks that create the project's static files in wwwroot +#wwwroot/ + +# MSTest test Results +[Tt]est[Rr]esult*/ +[Bb]uild[Ll]og.* + +# NUNIT +*.VisualState.xml +TestResult.xml + +# Build Results of an ATL Project +[Dd]ebugPS/ +[Rr]eleasePS/ +dlldata.c + +# .NET Core +project.lock.json +project.fragment.lock.json +artifacts/ +**/Properties/launchSettings.json + +*_i.c +*_p.c +*_i.h +*.ilk +*.meta +*.obj +*.pch +*.pdb +*.pgc +*.pgd +*.rsp +*.sbr +*.tlb +*.tli +*.tlh +*.tmp +*.tmp_proj +*.log +*.vspscc +*.vssscc +.builds +*.pidb +*.svclog +*.scc + +# Chutzpah Test files +_Chutzpah* + +# Visual C++ cache files +ipch/ +*.aps +*.ncb +*.opendb +*.opensdf +*.sdf +*.cachefile +*.VC.db +*.VC.VC.opendb + +# Visual Studio profiler +*.psess +*.vsp +*.vspx +*.sap + +# TFS 2012 Local Workspace +$tf/ + +# Guidance Automation Toolkit +*.gpState + +# ReSharper is a .NET coding add-in +_ReSharper*/ +*.[Rr]e[Ss]harper +*.DotSettings.user + +# JustCode is a .NET coding add-in +.JustCode + +# TeamCity is a build add-in +_TeamCity* + +# DotCover is a Code Coverage Tool +*.dotCover + +# Visual Studio code coverage results +*.coverage +*.coveragexml + +# NCrunch +_NCrunch_* +.*crunch*.local.xml +nCrunchTemp_* + +# MightyMoose +*.mm.* +AutoTest.Net/ + +# Web workbench (sass) +.sass-cache/ + +# Installshield output folder +[Ee]xpress/ + +# DocProject is a documentation generator add-in 
+DocProject/buildhelp/ +DocProject/Help/*.HxT +DocProject/Help/*.HxC +DocProject/Help/*.hhc +DocProject/Help/*.hhk +DocProject/Help/*.hhp +DocProject/Help/Html2 +DocProject/Help/html + +# Click-Once directory +publish/ + +# Publish Web Output +*.[Pp]ublish.xml +*.azurePubxml +# TODO: Comment the next line if you want to checkin your web deploy settings +# but database connection strings (with potential passwords) will be unencrypted +*.pubxml +*.publishproj + +# Microsoft Azure Web App publish settings. Comment the next line if you want to +# checkin your Azure Web App publish settings, but sensitive information contained +# in these scripts will be unencrypted +PublishScripts/ + +# NuGet Packages +*.nupkg +# The packages folder can be ignored because of Package Restore +**/packages/* +# except build/, which is used as an MSBuild target. +!**/packages/build/ +# Uncomment if necessary however generally it will be regenerated when needed +#!**/packages/repositories.config +# NuGet v3's project.json files produces more ignorable files +*.nuget.props +*.nuget.targets + +# Microsoft Azure Build Output +csx/ +*.build.csdef + +# Microsoft Azure Emulator +ecf/ +rcf/ + +# Windows Store app package directories and files +AppPackages/ +BundleArtifacts/ +Package.StoreAssociation.xml +_pkginfo.txt + +# Visual Studio cache files +# files ending in .cache can be ignored +*.[Cc]ache +# but keep track of directories ending in .cache +!*.[Cc]ache/ + +# Others +ClientBin/ +~$* +*~ +*.dbmdl +*.dbproj.schemaview +*.jfm +*.pfx +*.publishsettings +orleans.codegen.cs + +# Since there are multiple workflows, uncomment next line to ignore bower_components +# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622) +#bower_components/ + +# RIA/Silverlight projects +Generated_Code/ + +# Backup & report files from converting an old project file +# to a newer Visual Studio version. Backup files are not needed, +# because we have git ;-) +_UpgradeReport_Files/ +Backup*/ +UpgradeLog*.XML +UpgradeLog*.htm + +# SQL Server files +*.mdf +*.ldf +*.ndf + +# Business Intelligence projects +*.rdl.data +*.bim.layout +*.bim_*.settings + +# Microsoft Fakes +FakesAssemblies/ + +# GhostDoc plugin setting file +*.GhostDoc.xml + +# Node.js Tools for Visual Studio +.ntvs_analysis.dat +node_modules/ + +# Typescript v1 declaration files +typings/ + +# Visual Studio 6 build log +*.plg + +# Visual Studio 6 workspace options file +*.opt + +# Visual Studio 6 auto-generated workspace file (contains which files were open etc.) +*.vbw + +# Visual Studio LightSwitch build output +**/*.HTMLClient/GeneratedArtifacts +**/*.DesktopClient/GeneratedArtifacts +**/*.DesktopClient/ModelManifest.xml +**/*.Server/GeneratedArtifacts +**/*.Server/ModelManifest.xml +_Pvt_Extensions + +# Paket dependency manager +.paket/paket.exe +paket-files/ + +# FAKE - F# Make +.fake/ + +# JetBrains Rider +.idea/ +*.sln.iml + +# CodeRush +.cr/ + +# Python Tools for Visual Studio (PTVS) +__pycache__/ +*.pyc + +# Cake - Uncomment if you are using it +# tools/** +# !tools/packages.config + +# Telerik's JustMock configuration file +*.jmconfig + +# BizTalk build output +*.btp.cs +*.btm.cs +*.odx.cs +*.xsd.cs diff --git a/proxy_worker/protos/_src/LICENSE b/proxy_worker/protos/_src/LICENSE new file mode 100644 index 000000000..21071075c --- /dev/null +++ b/proxy_worker/protos/_src/LICENSE @@ -0,0 +1,21 @@ + MIT License + + Copyright (c) Microsoft Corporation. All rights reserved. 
+ + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE diff --git a/proxy_worker/protos/_src/README.md b/proxy_worker/protos/_src/README.md new file mode 100644 index 000000000..b22f0bb4b --- /dev/null +++ b/proxy_worker/protos/_src/README.md @@ -0,0 +1,98 @@ +# Azure Functions Language Worker Protobuf + +This repository contains the protobuf definition file which defines the gRPC service which is used between the [Azure Functions Host](https://github.com/Azure/azure-functions-host) and the Azure Functions language workers. This repo is shared across many repos in many languages (one per worker) by using git commands. + +To use this repo in Azure Functions language workers, follow the steps below to add this repo as a subtree (*Adding This Repo*). If this repo is already embedded in a language worker repo, follow the steps to update the consumed file (*Pulling Updates*). + +Learn more about Azure Functions projects on the [meta](https://github.com/azure/azure-functions) repo. + +## Adding This Repo + +From within the Azure Functions language worker repo: +1. Define a remote branch for cleaner git commands + - `git remote add proto-file https://github.com/azure/azure-functions-language-worker-protobuf.git` + - `git fetch proto-file` +2. Index the contents of azure-functions-language-worker-protobuf into the language worker repo + - `git read-tree --prefix=<path in your repo> -u proto-file/<branch name>` +3. Add the new path in the language worker repo to the .gitignore file + - In .gitignore, add the path in the language worker repo +4. Finalize with commit + - `git commit -m "Added subtree from https://github.com/azure/azure-functions-language-worker-protobuf. Branch: <branch name>. Commit: <commit hash>"` + - `git push` + +## Pulling Updates + +From within the Azure Functions language worker repo: +1. Define a remote branch for cleaner git commands + - `git remote add proto-file https://github.com/azure/azure-functions-language-worker-protobuf.git` + - `git fetch proto-file` +2. Pull a specific release tag + - `git fetch proto-file refs/tags/<tag name>` + - Example: `git fetch proto-file refs/tags/v1.1.0-protofile` +3. Merge updates + - Merge with an explicit path to the subtree: `git merge -X subtree=<path in your repo> --squash <tag name> --allow-unrelated-histories --strategy-option theirs` + - Example: `git merge -X subtree=src/WebJobs.Script.Grpc/azure-functions-language-worker-protobuf --squash v1.1.0-protofile --allow-unrelated-histories --strategy-option theirs` +4. 
Finalize with commit + - `git commit -m "Updated subtree from https://github.com/azure/azure-functions-language-worker-protobuf. Tag: <tag name>. Commit: <commit hash>"` + - `git push` + +## Releasing a Language Worker Protobuf version + +1. Draft a release in the GitHub UI + - Be sure to include details of the release +2. Create a release version, following semantic versioning guidelines ([semver.org](https://semver.org/)) +3. Tag the version with the pattern: `v<major>.<minor>.<patch>-protofile` (example: `v1.1.0-protofile`) +4. Merge `dev` to `master` + +## Consuming FunctionRpc.proto +*Note: Update the `<version>` placeholders before running the following commands* + +## CSharp +``` +set NUGET_PATH="%UserProfile%\.nuget\packages" +set GRPC_TOOLS_PATH=%NUGET_PATH%\grpc.tools\<version>\tools\windows_x86 +set PROTO_PATH=.\azure-functions-language-worker-protobuf\src\proto +set PROTO=.\azure-functions-language-worker-protobuf\src\proto\FunctionRpc.proto +set PROTOBUF_TOOLS=%NUGET_PATH%\google.protobuf.tools\<version>\tools +set MSGDIR=.\Messages + +if exist %MSGDIR% rmdir /s /q %MSGDIR% +mkdir %MSGDIR% + +set OUTDIR=%MSGDIR%\DotNet +mkdir %OUTDIR% +%GRPC_TOOLS_PATH%\protoc.exe %PROTO% --csharp_out %OUTDIR% --grpc_out=%OUTDIR% --plugin=protoc-gen-grpc=%GRPC_TOOLS_PATH%\grpc_csharp_plugin.exe --proto_path=%PROTO_PATH% --proto_path=%PROTOBUF_TOOLS% +``` +## JavaScript +In package.json, add to the build script the following commands to build .js files and to build .ts files. Use and install the npm package `protobufjs`. + +Generate JavaScript files: +``` +pbjs -t json-module -w commonjs -o azure-functions-language-worker-protobuf/src/rpc.js azure-functions-language-worker-protobuf/src/proto/FunctionRpc.proto +``` +Generate TypeScript files: +``` +pbjs -t static-module azure-functions-language-worker-protobuf/src/proto/FunctionRpc.proto -o azure-functions-language-worker-protobuf/src/rpc_static.js && pbts -o azure-functions-language-worker-protobuf/src/rpc.d.ts azure-functions-language-worker-protobuf/src/rpc_static.js +``` + +## Java +Maven plugin: [protobuf-maven-plugin](https://www.xolstice.org/protobuf-maven-plugin/) +In pom.xml, add the following under the configuration for this plugin: +`<protoSourceRoot>${basedir}/<path to this repo>/azure-functions-language-worker-protobuf/src/proto</protoSourceRoot>` + +## Python +--TODO (see the sketch after this README) + +## Contributing + +This project welcomes contributions and suggestions. Most contributions require you to agree to a +Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us +the rights to use your contribution. For details, visit https://cla.microsoft.com. + +When you submit a pull request, a CLA-bot will automatically determine whether you need to provide +a CLA and decorate the PR appropriately (e.g., label, comment). Simply follow the instructions +provided by the bot. You will only need to do this once across all repos using our CLA. + +This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/). +For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or +contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments. 
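The `## Python` section of the README above is still marked TODO. As a minimal sketch only (not part of this PR; `<output dir>` is a placeholder, and the proto path assumes the subtree layout described in the README), the Python equivalent of the CSharp and JavaScript steps uses the `grpc_tools.protoc` module from the `grpcio-tools` package that pyproject.toml pins; in this repo the pack scripts wrap the same step behind `python -m invoke -c test_setup build-protos`:

```
python -m pip install grpcio-tools

python -m grpc_tools.protoc \
    --proto_path=azure-functions-language-worker-protobuf/src/proto \
    --python_out=<output dir> \
    --grpc_python_out=<output dir> \
    azure-functions-language-worker-protobuf/src/proto/FunctionRpc.proto \
    azure-functions-language-worker-protobuf/src/proto/identity/ClaimsIdentityRpc.proto \
    azure-functions-language-worker-protobuf/src/proto/shared/NullableTypes.proto
```

This emits `FunctionRpc_pb2.py` and `FunctionRpc_pb2_grpc.py` (plus the identity and shared stubs), which is why the `protos/.gitignore` files touched in this diff ignore `*_pb2.py` and `*_pb2_grpc.py`.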
diff --git a/proxy_worker/protos/_src/src/proto/FunctionRpc.proto b/proxy_worker/protos/_src/src/proto/FunctionRpc.proto new file mode 100644 index 000000000..f48bc7bbe --- /dev/null +++ b/proxy_worker/protos/_src/src/proto/FunctionRpc.proto @@ -0,0 +1,730 @@ +syntax = "proto3"; +// protobuf vscode extension: https://marketplace.visualstudio.com/items?itemName=zxh404.vscode-proto3 + +option java_multiple_files = true; +option java_package = "com.microsoft.azure.functions.rpc.messages"; +option java_outer_classname = "FunctionProto"; +option csharp_namespace = "Microsoft.Azure.WebJobs.Script.Grpc.Messages"; +option go_package ="github.com/Azure/azure-functions-go-worker/internal/rpc"; + +package AzureFunctionsRpcMessages; + +import "google/protobuf/duration.proto"; +import "identity/ClaimsIdentityRpc.proto"; +import "shared/NullableTypes.proto"; + +// Interface exported by the server. +service FunctionRpc { + rpc EventStream (stream StreamingMessage) returns (stream StreamingMessage) {} +} + +message StreamingMessage { + // Used to identify message between host and worker + string request_id = 1; + + // Payload of the message + oneof content { + + // Worker initiates stream + StartStream start_stream = 20; + + // Host sends capabilities/init data to worker + WorkerInitRequest worker_init_request = 17; + // Worker responds after initializing with its capabilities & status + WorkerInitResponse worker_init_response = 16; + + // MESSAGE NOT USED + // Worker periodically sends empty heartbeat message to host + WorkerHeartbeat worker_heartbeat = 15; + + // Host sends terminate message to worker. + // Worker terminates if it can, otherwise host terminates after a grace period + WorkerTerminate worker_terminate = 14; + + // Host periodically sends status request to the worker + WorkerStatusRequest worker_status_request = 12; + WorkerStatusResponse worker_status_response = 13; + + // On file change event, host sends notification to worker + FileChangeEventRequest file_change_event_request = 6; + + // Worker requests a desired action (restart worker, reload function) + WorkerActionResponse worker_action_response = 7; + + // Host sends required metadata to worker to load function + FunctionLoadRequest function_load_request = 8; + // Worker responds after loading with the load result + FunctionLoadResponse function_load_response = 9; + + // Host requests a given invocation + InvocationRequest invocation_request = 4; + + // Worker responds to a given invocation + InvocationResponse invocation_response = 5; + + // Host sends cancel message to attempt to cancel an invocation. + // If an invocation is cancelled, host will receive an invocation response with status cancelled. 
+ InvocationCancel invocation_cancel = 21; + + // Worker logs a message back to the host + RpcLog rpc_log = 2; + + FunctionEnvironmentReloadRequest function_environment_reload_request = 25; + + FunctionEnvironmentReloadResponse function_environment_reload_response = 26; + + // Ask the worker to close any open shared memory resources for a given invocation + CloseSharedMemoryResourcesRequest close_shared_memory_resources_request = 27; + CloseSharedMemoryResourcesResponse close_shared_memory_resources_response = 28; + + // Worker indexing message types + FunctionsMetadataRequest functions_metadata_request = 29; + FunctionMetadataResponse function_metadata_response = 30; + + // Host sends required metadata to worker to load functions + FunctionLoadRequestCollection function_load_request_collection = 31; + + // Host gets the list of function load responses + FunctionLoadResponseCollection function_load_response_collection = 32; + + // Host sends required metadata to worker to warmup the worker + WorkerWarmupRequest worker_warmup_request = 33; + + // Worker responds after warming up with the warmup result + WorkerWarmupResponse worker_warmup_response = 34; + + } +} + +// Process.Start required info +// connection details +// protocol type +// protocol version + +// Worker sends the host information identifying itself +message StartStream { + // id of the worker + string worker_id = 2; +} + +// Host requests the worker to initialize itself +message WorkerInitRequest { + // version of the host sending init request + string host_version = 1; + + // A map of host supported features/capabilities + map<string, string> capabilities = 2; + + // inform worker of supported categories and their levels + // i.e. Worker = Verbose, Function.MyFunc = None + map<string, string> log_categories = 3; + + // Full path of worker.config.json location + string worker_directory = 4; + + // base directory for function app + string function_app_directory = 5; +} + +// Worker responds with the result of initializing itself +message WorkerInitResponse { + // PROPERTY NOT USED + // TODO: Remove from protobuf during next breaking change release + string worker_version = 1; + + // A map of worker supported features/capabilities + map<string, string> capabilities = 2; + + // Status of the response + StatusResult result = 3; + + // Worker metadata captured for telemetry purposes + WorkerMetadata worker_metadata = 4; +} + +message WorkerMetadata { + // The runtime/stack name + string runtime_name = 1; + + // The version of the runtime/stack + string runtime_version = 2; + + // The version of the worker + string worker_version = 3; + + // The worker bitness/architecture + string worker_bitness = 4; + + // Optional additional custom properties + map<string, string> custom_properties = 5; +} + +// Used by the host to determine success/failure/cancellation +message StatusResult { + // Indicates Failure/Success/Cancelled + enum Status { + Failure = 0; + Success = 1; + Cancelled = 2; + } + + // Status for the given result + Status status = 4; + + // Specific message about the result + string result = 1; + + // Exception message (if exists) for the status + RpcException exception = 2; + + // Captured logs or relevant details can use the logs property + repeated RpcLog logs = 3; +} + +// MESSAGE NOT USED +// TODO: Remove from protobuf during next breaking change release +message WorkerHeartbeat {} + +// Warning before killing the process after grace_period +// Worker self terminates ..no response on this +message WorkerTerminate { + google.protobuf.Duration grace_period = 1; +} + +// Host notifies 
worker of file content change +message FileChangeEventRequest { + // Types of File change operations (See link for more info: https://msdn.microsoft.com/en-us/library/t6xf43e0(v=vs.110).aspx) + enum Type { + Unknown = 0; + Created = 1; + Deleted = 2; + Changed = 4; + Renamed = 8; + All = 15; + } + + // type for this event + Type type = 1; + + // full file path for the file change notification + string full_path = 2; + + // Name of the function affected + string name = 3; +} + +// Indicates whether worker reloaded successfully or needs a restart +message WorkerActionResponse { + // indicates whether a restart is needed, or reload successfully + enum Action { + Restart = 0; + Reload = 1; + } + + // action for this response + Action action = 1; + + // text reason for the response + string reason = 2; +} + +// Used by the host to determine worker health +message WorkerStatusRequest { +} + +// Worker responds with status message +// TODO: Add any worker relevant status to response +message WorkerStatusResponse { +} + +message FunctionEnvironmentReloadRequest { + // Environment variables from the current process + map<string, string> environment_variables = 1; + // Current directory of function app + string function_app_directory = 2; +} + +message FunctionEnvironmentReloadResponse { + // After specialization, worker sends capabilities & metadata. + // Worker metadata captured for telemetry purposes + WorkerMetadata worker_metadata = 1; + + // A map of worker supported features/capabilities + map<string, string> capabilities = 2; + + // Status of the response + StatusResult result = 3; +} + +// Tell the out-of-proc worker to close any shared memory maps it allocated for given invocation +message CloseSharedMemoryResourcesRequest { + repeated string map_names = 1; +} + +// Response from the worker indicating which of the shared memory maps have been successfully closed and which have not been closed +// The key (string) is the map name and the value (bool) is true if it was closed, false if not +message CloseSharedMemoryResourcesResponse { + map<string, bool> close_map_results = 1; +} + +// Host tells the worker to load a list of Functions +message FunctionLoadRequestCollection { + repeated FunctionLoadRequest function_load_requests = 1; +} + +// Host gets the list of function load responses +message FunctionLoadResponseCollection { + repeated FunctionLoadResponse function_load_responses = 1; +} + +// Load request of a single Function +message FunctionLoadRequest { + // unique function identifier (avoid name collisions, facilitate reload case) + string function_id = 1; + + // Metadata for the request + RpcFunctionMetadata metadata = 2; + + // A flag indicating if managed dependency is enabled or not + bool managed_dependency_enabled = 3; +} + +// Worker tells host result of reload +message FunctionLoadResponse { + // unique function identifier + string function_id = 1; + + // Result of load operation + StatusResult result = 2; + // TODO: return type expected? 
+ + // Result of load operation + bool is_dependency_downloaded = 3; +} + +// Information on how a Function should be loaded and its bindings +message RpcFunctionMetadata { + // TODO: do we want the host's name - the language worker might do a better job of assignment than the host + string name = 4; + + // base directory for the Function + string directory = 1; + + // Script file specified + string script_file = 2; + + // Entry point specified + string entry_point = 3; + + // Bindings info + map<string, BindingInfo> bindings = 6; + + // Is set to true for proxy + bool is_proxy = 7; + + // Function indexing status + StatusResult status = 8; + + // Function language + string language = 9; + + // Raw binding info + repeated string raw_bindings = 10; + + // unique function identifier (avoid name collisions, facilitate reload case) + string function_id = 13; + + // A flag indicating if managed dependency is enabled or not + bool managed_dependency_enabled = 14; + + // The optional function execution retry strategy to use on invocation failures. + RpcRetryOptions retry_options = 15; + + // Properties for function metadata + // They're usually specific to a worker and largely passed along to the controller API for use + // outside the host + map<string, string> properties = 16; +} + +// Host tells worker it is ready to receive metadata +message FunctionsMetadataRequest { + // base directory for function app + string function_app_directory = 1; +} + +// Worker sends function metadata back to host +message FunctionMetadataResponse { + // list of function indexing responses + repeated RpcFunctionMetadata function_metadata_results = 1; + + // status of overall metadata request + StatusResult result = 2; + + // if set to true then host will perform indexing + bool use_default_metadata_indexing = 3; +} + +// Host requests worker to invoke a Function +message InvocationRequest { + // Unique id for each invocation + string invocation_id = 1; + + // Unique id for each Function + string function_id = 2; + + // Input bindings (include trigger) + repeated ParameterBinding input_data = 3; + + // binding metadata from trigger + map<string, TypedData> trigger_metadata = 4; + + // Populates activityId, tracestate and tags from host + RpcTraceContext trace_context = 5; + + // Current retry context + RetryContext retry_context = 6; +} + +// Host sends ActivityId, traceStateString and Tags from host +message RpcTraceContext { + // This corresponds to Activity.Current?.Id + string trace_parent = 1; + + // This corresponds to Activity.Current?.TraceStateString + string trace_state = 2; + + // This corresponds to Activity.Current?.Tags + map<string, string> attributes = 3; +} + +// Host sends retry context for a function invocation +message RetryContext { + // Current retry count + int32 retry_count = 1; + + // Max retry count + int32 max_retry_count = 2; + + // Exception that caused the retry + RpcException exception = 3; +} + +// Host requests worker to cancel invocation +message InvocationCancel { + // Unique id for invocation + string invocation_id = 2; + + // PROPERTY NOT USED + google.protobuf.Duration grace_period = 1; +} + +// Worker responds with status of Invocation +message InvocationResponse { + // Unique id for invocation + string invocation_id = 1; + + // Output binding data + repeated ParameterBinding output_data = 2; + + // data returned from Function (for $return and triggers with return support) + TypedData return_value = 4; + + // Status of the invocation (success/failure/canceled) + StatusResult result = 3; +} + +message WorkerWarmupRequest { + // Full path of 
worker.config.json location + string worker_directory = 1; +} + +message WorkerWarmupResponse { + StatusResult result = 1; +} + +// Used to encapsulate data which could be a variety of types +message TypedData { + oneof data { + string string = 1; + string json = 2; + bytes bytes = 3; + bytes stream = 4; + RpcHttp http = 5; + sint64 int = 6; + double double = 7; + CollectionBytes collection_bytes = 8; + CollectionString collection_string = 9; + CollectionDouble collection_double = 10; + CollectionSInt64 collection_sint64 = 11; + ModelBindingData model_binding_data = 12; + CollectionModelBindingData collection_model_binding_data = 13; + } +} + +// Specify which type of data is contained in the shared memory region being read +enum RpcDataType { + unknown = 0; + string = 1; + json = 2; + bytes = 3; + stream = 4; + http = 5; + int = 6; + double = 7; + collection_bytes = 8; + collection_string = 9; + collection_double = 10; + collection_sint64 = 11; +} + +// Used to provide metadata about shared memory region to read data from +message RpcSharedMemory { + // Name of the shared memory map containing data + string name = 1; + // Offset in the shared memory map to start reading data from + int64 offset = 2; + // Number of bytes to read (starting from the offset) + int64 count = 3; + // Final type to which the read data (in bytes) is to be interpreted as + RpcDataType type = 4; +} + +// Used to encapsulate collection string +message CollectionString { + repeated string string = 1; +} + +// Used to encapsulate collection bytes +message CollectionBytes { + repeated bytes bytes = 1; +} + +// Used to encapsulate collection double +message CollectionDouble { + repeated double double = 1; +} + +// Used to encapsulate collection sint64 +message CollectionSInt64 { + repeated sint64 sint64 = 1; +} + +// Used to describe a given binding on invocation +message ParameterBinding { + // Name for the binding + string name = 1; + + oneof rpc_data { + // Data for the binding + TypedData data = 2; + + // Metadata about the shared memory region to read data from + RpcSharedMemory rpc_shared_memory = 3; + } +} + +// Used to describe a given binding on load +message BindingInfo { + // Indicates whether it is an input or output binding (or a fancy inout binding) + enum Direction { + in = 0; + out = 1; + inout = 2; + } + + // Indicates the type of the data for the binding + enum DataType { + undefined = 0; + string = 1; + binary = 2; + stream = 3; + } + + // Type of binding (e.g. HttpTrigger) + string type = 2; + + // Direction of the given binding + Direction direction = 3; + + DataType data_type = 4; + + // Properties for binding metadata + map<string, string> properties = 5; +} + +// Used to send logs back to the Host +message RpcLog { + // Matching ILogger semantics + // https://github.com/aspnet/Logging/blob/9506ccc3f3491488fe88010ef8b9eb64594abf95/src/Microsoft.Extensions.Logging/Logger.cs + // Level for the Log + enum Level { + Trace = 0; + Debug = 1; + Information = 2; + Warning = 3; + Error = 4; + Critical = 5; + None = 6; + } + + // Category of the log. Defaults to User if not specified. + enum RpcLogCategory { + User = 0; + System = 1; + CustomMetric = 2; + } + + // Unique id for invocation (if exists) + string invocation_id = 1; + + // TODO: This should be an enum + // Category for the log (startup, load, invocation, etc.) 
+ string category = 2; + + // Level for the given log message + Level level = 3; + + // Message for the given log + string message = 4; + + // Id for the event associated with this log (if exists) + string event_id = 5; + + // Exception (if exists) + RpcException exception = 6; + + // json serialized property bag + string properties = 7; + + // Category of the log. Either user(default), system, or custom metric. + RpcLogCategory log_category = 8; + + // strongly-typed (ish) property bag + map<string, TypedData> propertiesMap = 9; +} + +// Encapsulates an Exception +message RpcException { + // Source of the exception + string source = 3; + + // Stack trace for the exception + string stack_trace = 1; + + // Textual message describing the exception + string message = 2; + + // Worker specifies whether exception is a user exception, + // for purpose of application insights logging. Defaults to false. + bool is_user_exception = 4; + + // Type of exception. If it's a user exception, the type is passed along to app insights. + // Otherwise, it's ignored for now. + string type = 5; +} + +// Http cookie type. Note that only name and value are used for Http requests +message RpcHttpCookie { + // Enum that lets servers require that a cookie shouldn't be sent with cross-site requests + enum SameSite { + None = 0; + Lax = 1; + Strict = 2; + ExplicitNone = 3; + } + + // Cookie name + string name = 1; + + // Cookie value + string value = 2; + + // Specifies allowed hosts to receive the cookie + NullableString domain = 3; + + // Specifies URL path that must exist in the requested URL + NullableString path = 4; + + // Sets the cookie to expire at a specific date instead of when the client closes. + // It is generally recommended that you use "Max-Age" over "Expires". + NullableTimestamp expires = 5; + + // Sets the cookie to only be sent with an encrypted request + NullableBool secure = 6; + + // Sets the cookie to be inaccessible to JavaScript's Document.cookie API + NullableBool http_only = 7; + + // Allows servers to assert that a cookie ought not to be sent along with cross-site requests + SameSite same_site = 8; + + // Number of seconds until the cookie expires. A zero or negative number will expire the cookie immediately. + NullableDouble max_age = 9; +} + +// TODO - solidify this or remove it +message RpcHttp { + string method = 1; + string url = 2; + map<string, string> headers = 3; + TypedData body = 4; + map<string, string> params = 10; + string status_code = 12; + map<string, string> query = 15; + bool enable_content_negotiation = 16; + TypedData rawBody = 17; + repeated RpcClaimsIdentity identities = 18; + repeated RpcHttpCookie cookies = 19; + map<string, NullableString> nullable_headers = 20; + map<string, NullableString> nullable_params = 21; + map<string, NullableString> nullable_query = 22; +} + +// Message representing Microsoft.Azure.WebJobs.ParameterBindingData +// Used for hydrating SDK-type bindings in out-of-proc workers +message ModelBindingData +{ + // The version of the binding data content + string version = 1; + + // The extension source of the binding data + string source = 2; + + // The content type of the binding data content + string content_type = 3; + + // The binding data content + bytes content = 4; +} + +// Used to encapsulate collection model_binding_data +message CollectionModelBindingData { + repeated ModelBindingData model_binding_data = 1; +} + +// Retry policy which the worker sends the host when the worker indexes +// a function. +message RpcRetryOptions +{ + // The retry strategy to use. Valid values are fixed delay or exponential backoff. 
+ enum RetryStrategy + { + exponential_backoff = 0; + fixed_delay = 1; + } + + // The maximum number of retries allowed per function execution. + // -1 means to retry indefinitely. + int32 max_retry_count = 2; + + // The delay that's used between retries when you're using a fixed delay strategy. + google.protobuf.Duration delay_interval = 3; + + // The minimum retry delay when you're using an exponential backoff strategy + google.protobuf.Duration minimum_interval = 4; + + // The maximum retry delay when you're using an exponential backoff strategy + google.protobuf.Duration maximum_interval = 5; + + RetryStrategy retry_strategy = 6; +} \ No newline at end of file diff --git a/proxy_worker/protos/_src/src/proto/identity/ClaimsIdentityRpc.proto b/proxy_worker/protos/_src/src/proto/identity/ClaimsIdentityRpc.proto new file mode 100644 index 000000000..c3945bb8a --- /dev/null +++ b/proxy_worker/protos/_src/src/proto/identity/ClaimsIdentityRpc.proto @@ -0,0 +1,26 @@ +syntax = "proto3"; +// protobuf vscode extension: https://marketplace.visualstudio.com/items?itemName=zxh404.vscode-proto3 + +option java_package = "com.microsoft.azure.functions.rpc.messages"; + +import "shared/NullableTypes.proto"; + +// Light-weight representation of a .NET System.Security.Claims.ClaimsIdentity object. +// This is the same serialization as found in EasyAuth, and needs to be kept in sync with +// its ClaimsIdentitySlim definition, as seen in the WebJobs extension: +// https://github.com/Azure/azure-webjobs-sdk-extensions/blob/dev/src/WebJobs.Extensions.Http/ClaimsIdentitySlim.cs +message RpcClaimsIdentity { + NullableString authentication_type = 1; + NullableString name_claim_type = 2; + NullableString role_claim_type = 3; + repeated RpcClaim claims = 4; +} + +// Light-weight representation of a .NET System.Security.Claims.Claim object. 
+// This is the same serialization as found in EasyAuth, and needs to be kept in sync with +// its ClaimSlim definition, as seen in the WebJobs extension: +// https://github.com/Azure/azure-webjobs-sdk-extensions/blob/dev/src/WebJobs.Extensions.Http/ClaimSlim.cs +message RpcClaim { + string value = 1; + string type = 2; +} diff --git a/proxy_worker/protos/_src/src/proto/shared/NullableTypes.proto b/proxy_worker/protos/_src/src/proto/shared/NullableTypes.proto new file mode 100644 index 000000000..4fb476502 --- /dev/null +++ b/proxy_worker/protos/_src/src/proto/shared/NullableTypes.proto @@ -0,0 +1,30 @@ +syntax = "proto3"; +// protobuf vscode extension: https://marketplace.visualstudio.com/items?itemName=zxh404.vscode-proto3 + +option java_package = "com.microsoft.azure.functions.rpc.messages"; + +import "google/protobuf/timestamp.proto"; + +message NullableString { + oneof string { + string value = 1; + } +} + +message NullableDouble { + oneof double { + double value = 1; + } +} + +message NullableBool { + oneof bool { + bool value = 1; + } +} + +message NullableTimestamp { + oneof timestamp { + google.protobuf.Timestamp value = 1; + } +} diff --git a/pyproject.toml b/pyproject.toml index 109e1c34b..ce8a5063b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -26,15 +26,20 @@ classifiers = [ "Intended Audience :: Developers" ] dependencies = [ - "azure-functions==1.23.0b3", - "python-dateutil ~=2.9.0", + "azure-functions==1.23.0", + "python-dateutil~=2.9.0", "protobuf~=3.19.3; python_version == '3.7'", - "protobuf~=5.29.0; python_version >= '3.8'", + "protobuf~=4.25.3; python_version >= '3.8' and python_version < '3.13'", + "protobuf~=5.29.0; python_version >= '3.13'", "grpcio-tools~=1.43.0; python_version == '3.7'", - "grpcio-tools~=1.70.0; python_version >= '3.8'", + "grpcio-tools~=1.59.0; python_version >= '3.8' and python_version < '3.13'", + "grpcio-tools~=1.70.0; python_version >= '3.13'", "grpcio~=1.43.0; python_version == '3.7'", - "grpcio~=1.70.0; python_version >= '3.8'", - "azurefunctions-extensions-base; python_version >= '3.8'" + "grpcio ~=1.59.0; python_version >= '3.8' and python_version < '3.13'", + "grpcio~=1.70.0; python_version >= '3.13'", + "azurefunctions-extensions-base; python_version >= '3.8'", + "test-worker==1.0.0a38; python_version >= '3.13'", + "test-worker-v1==1.0.0a11; python_version >= '3.13'" ] [project.urls] diff --git a/python/proxyV4/worker.py b/python/proxyV4/worker.py index dce5d51e6..2f899f37e 100644 --- a/python/proxyV4/worker.py +++ b/python/proxyV4/worker.py @@ -2,7 +2,6 @@ import pathlib import sys - PKGS_PATH = "/home/site/wwwroot/.python_packages" PKGS = "lib/site-packages" diff --git a/python/test/worker.py b/python/test/worker.py index a15d160ed..95790083f 100644 --- a/python/test/worker.py +++ b/python/test/worker.py @@ -1,7 +1,6 @@ import sys import os -from azure_functions_worker import main -from proxy_worker import start_worker + # Azure environment variables AZURE_WEBJOBS_SCRIPT_ROOT = "AzureWebJobsScriptRoot" @@ -16,7 +15,10 @@ def add_script_root_to_sys_path(): if __name__ == '__main__': add_script_root_to_sys_path() - if sys.version_info.minor >= 13: - start_worker.start() - else: + minor_version = sys.version_info[1] + if minor_version < 13: + from azure_functions_worker import main main.main() + else: + from proxy_worker import start_worker + start_worker.start() diff --git a/setup.cfg b/setup.cfg index 6f5a7fb98..5dde99ef5 100644 --- a/setup.cfg +++ b/setup.cfg @@ -18,3 +18,9 @@ ignore_errors = True 
[mypy-azure_functions_worker._thirdparty.typing_inspect] ignore_errors = True + +[mypy-proxy_worker.protos.*] +ignore_errors = True + +[mypy-proxy_worker._thirdparty.typing_inspect] +ignore_errors = True \ No newline at end of file diff --git a/tests/extension_tests/http_v2_tests/test_http_v2.py b/tests/extension_tests/http_v2_tests/test_http_v2.py index 8c1d5b48e..514633743 100644 --- a/tests/extension_tests/http_v2_tests/test_http_v2.py +++ b/tests/extension_tests/http_v2_tests/test_http_v2.py @@ -8,11 +8,16 @@ import requests from tests.utils import testutils -from azure_functions_worker.utils.common import is_envvar_true from tests.utils.constants import CONSUMPTION_DOCKER_TEST, DEDICATED_DOCKER_TEST +# This app setting is only present for Python < 3.13 from azure_functions_worker.constants import PYTHON_ENABLE_INIT_INDEXING +if sys.version_info.minor < 13: + from azure_functions_worker.utils.common import is_envvar_true +else: + from proxy_worker.utils.common import is_envvar_true + REQUEST_TIMEOUT_SEC = 5 diff --git a/tests/unittests/test_code_quality.py b/tests/unittests/test_code_quality.py index 54d1cc725..45f7bda47 100644 --- a/tests/unittests/test_code_quality.py +++ b/tests/unittests/test_code_quality.py @@ -17,7 +17,7 @@ def test_mypy(self): try: subprocess.run( - [sys.executable, '-m', 'mypy', '-m', 'azure_functions_worker'], + [sys.executable, '-m', 'mypy', '-m', 'azure_functions_worker', 'proxy_worker'], check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, diff --git a/tests/utils/testutils.py b/tests/utils/testutils.py index c04b134c5..360fca2e5 100644 --- a/tests/utils/testutils.py +++ b/tests/utils/testutils.py @@ -50,18 +50,23 @@ WebHostDedicated, ) -from azure_functions_worker import dispatcher, protos -from azure_functions_worker.bindings.shared_memory_data_transfer import ( - FileAccessorFactory, -) -from azure_functions_worker.bindings.shared_memory_data_transfer import ( - SharedMemoryConstants as consts, -) -from azure_functions_worker.constants import ( - FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED, - UNIX_SHARED_MEMORY_DIRECTORIES, -) -from azure_functions_worker.utils.common import get_app_setting, is_envvar_true +if sys.version_info.minor < 13: + from azure_functions_worker import dispatcher, protos + from azure_functions_worker.bindings.shared_memory_data_transfer import ( + FileAccessorFactory, + ) + from azure_functions_worker.bindings.shared_memory_data_transfer import ( + SharedMemoryConstants as consts, + ) + from azure_functions_worker.constants import ( + FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED, + UNIX_SHARED_MEMORY_DIRECTORIES, + ) + from azure_functions_worker.utils.common import get_app_setting, is_envvar_true +else: + from proxy_worker import dispatcher, protos + from proxy_worker.utils.common import is_envvar_true + from proxy_worker.utils.app_settings import get_app_setting TESTS_ROOT = PROJECT_ROOT / 'tests' E2E_TESTS_FOLDER = pathlib.Path('endtoend') @@ -320,125 +325,126 @@ def _run_test(self, test, *args, **kwargs): if test_exception is not None: raise test_exception - -class SharedMemoryTestCase(unittest.TestCase): - """ - For tests involving shared memory data transfer usage. 
- """ - - def setUp(self): - self.was_shmem_env_true = is_envvar_true( - FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED) - os.environ.update( - {FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED: '1'}) - - os_name = platform.system() - if os_name == 'Darwin': - # If an existing AppSetting is specified, save it so it can be - # restored later - self.was_shmem_dirs = get_app_setting( - UNIX_SHARED_MEMORY_DIRECTORIES - ) - self._setUpDarwin() - elif os_name == 'Linux': - self._setUpLinux() - self.file_accessor = FileAccessorFactory.create_file_accessor() - - def tearDown(self): - os_name = platform.system() - if os_name == 'Darwin': - self._tearDownDarwin() - if self.was_shmem_dirs is not None: - # If an AppSetting was set before the tests ran, restore it back - os.environ.update( - {UNIX_SHARED_MEMORY_DIRECTORIES: self.was_shmem_dirs}) - elif os_name == 'Linux': - self._tearDownLinux() - - if not self.was_shmem_env_true: - os.environ.update( - {FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED: '0'}) - - def get_new_mem_map_name(self): - return str(uuid.uuid4()) - - def get_random_bytes(self, num_bytes): - return bytearray(random.getrandbits(8) for _ in range(num_bytes)) - - def get_random_string(self, num_chars): - return ''.join(random.choices(string.ascii_uppercase + string.digits, - k=num_chars)) - - def is_valid_uuid(self, uuid_to_test: str, version: int = 4) -> bool: +# This is not supported in 3.13+ +if sys.version_info.minor < 13: + class SharedMemoryTestCase(unittest.TestCase): """ - Check if uuid_to_test is a valid UUID. - Reference: https://stackoverflow.com/a/33245493/3132415 + For tests involving shared memory data transfer usage. """ - try: - uuid_obj = uuid.UUID(uuid_to_test, version=version) - except ValueError: - return False - return str(uuid_obj) == uuid_to_test - def _createSharedMemoryDirectories(self, directories): - for temp_dir in directories: - temp_dir_path = os.path.join(temp_dir, consts.UNIX_TEMP_DIR_SUFFIX) - if not os.path.exists(temp_dir_path): - os.makedirs(temp_dir_path) + def setUp(self): + self.was_shmem_env_true = is_envvar_true( + FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED) + os.environ.update( + {FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED: '1'}) + + os_name = platform.system() + if os_name == 'Darwin': + # If an existing AppSetting is specified, save it so it can be + # restored later + self.was_shmem_dirs = get_app_setting( + UNIX_SHARED_MEMORY_DIRECTORIES + ) + self._setUpDarwin() + elif os_name == 'Linux': + self._setUpLinux() + self.file_accessor = FileAccessorFactory.create_file_accessor() + + def tearDown(self): + os_name = platform.system() + if os_name == 'Darwin': + self._tearDownDarwin() + if self.was_shmem_dirs is not None: + # If an AppSetting was set before the tests ran, restore it back + os.environ.update( + {UNIX_SHARED_MEMORY_DIRECTORIES: self.was_shmem_dirs}) + elif os_name == 'Linux': + self._tearDownLinux() + + if not self.was_shmem_env_true: + os.environ.update( + {FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED: '0'}) - def _deleteSharedMemoryDirectories(self, directories): - for temp_dir in directories: - temp_dir_path = os.path.join(temp_dir, consts.UNIX_TEMP_DIR_SUFFIX) - shutil.rmtree(temp_dir_path) + def get_new_mem_map_name(self): + return str(uuid.uuid4()) - def _setUpLinux(self): - self._createSharedMemoryDirectories(consts.UNIX_TEMP_DIRS) + def get_random_bytes(self, num_bytes): + return bytearray(random.getrandbits(8) for _ in range(num_bytes)) - def _tearDownLinux(self): - 
self._deleteSharedMemoryDirectories(consts.UNIX_TEMP_DIRS) + def get_random_string(self, num_chars): + return ''.join(random.choices(string.ascii_uppercase + string.digits, + k=num_chars)) - def _setUpDarwin(self): - """ - Create a RAM disk on macOS. - Ref: https://stackoverflow.com/a/2033417/3132415 - """ - size_in_mb = consts.MAX_BYTES_FOR_SHARED_MEM_TRANSFER / (1024 * 1024) - size = 2048 * size_in_mb - # The following command returns the name of the created disk - cmd = ['hdiutil', 'attach', '-nomount', f'ram://{size}'] - result = subprocess.run(cmd, stdout=subprocess.PIPE) - if result.returncode != 0: - raise IOError(f'Cannot create ram disk with command: {cmd} - ' - f'{result.stdout} - {result.stderr}') - disk_name = result.stdout.strip().decode() - # We create a volume on the disk created above and mount it - volume_name = 'shm' - cmd = ['diskutil', 'eraseVolume', 'HFS+', volume_name, disk_name] - result = subprocess.run(cmd, stdout=subprocess.PIPE) - if result.returncode != 0: - raise IOError(f'Cannot create volume with command: {cmd} - ' - f'{result.stdout} - {result.stderr}') - directory = f'/Volumes/{volume_name}' - self.created_directories = [directory] - # Create directories in the volume for shared memory maps - self._createSharedMemoryDirectories(self.created_directories) - # Override the AppSetting for the duration of this test so the - # FileAccessorUnix can use these directories for creating memory maps - os.environ.update( - {UNIX_SHARED_MEMORY_DIRECTORIES: ','.join(self.created_directories)} - ) + def is_valid_uuid(self, uuid_to_test: str, version: int = 4) -> bool: + """ + Check if uuid_to_test is a valid UUID. + Reference: https://stackoverflow.com/a/33245493/3132415 + """ + try: + uuid_obj = uuid.UUID(uuid_to_test, version=version) + except ValueError: + return False + return str(uuid_obj) == uuid_to_test + + def _createSharedMemoryDirectories(self, directories): + for temp_dir in directories: + temp_dir_path = os.path.join(temp_dir, consts.UNIX_TEMP_DIR_SUFFIX) + if not os.path.exists(temp_dir_path): + os.makedirs(temp_dir_path) + + def _deleteSharedMemoryDirectories(self, directories): + for temp_dir in directories: + temp_dir_path = os.path.join(temp_dir, consts.UNIX_TEMP_DIR_SUFFIX) + shutil.rmtree(temp_dir_path) + + def _setUpLinux(self): + self._createSharedMemoryDirectories(consts.UNIX_TEMP_DIRS) + + def _tearDownLinux(self): + self._deleteSharedMemoryDirectories(consts.UNIX_TEMP_DIRS) + + def _setUpDarwin(self): + """ + Create a RAM disk on macOS. 
+ Ref: https://stackoverflow.com/a/2033417/3132415 + """ + size_in_mb = consts.MAX_BYTES_FOR_SHARED_MEM_TRANSFER / (1024 * 1024) + size = 2048 * size_in_mb + # The following command returns the name of the created disk + cmd = ['hdiutil', 'attach', '-nomount', f'ram://{size}'] + result = subprocess.run(cmd, stdout=subprocess.PIPE) + if result.returncode != 0: + raise IOError(f'Cannot create ram disk with command: {cmd} - ' + f'{result.stdout} - {result.stderr}') + disk_name = result.stdout.strip().decode() + # We create a volume on the disk created above and mount it + volume_name = 'shm' + cmd = ['diskutil', 'eraseVolume', 'HFS+', volume_name, disk_name] + result = subprocess.run(cmd, stdout=subprocess.PIPE) + if result.returncode != 0: + raise IOError(f'Cannot create volume with command: {cmd} - ' + f'{result.stdout} - {result.stderr}') + directory = f'/Volumes/{volume_name}' + self.created_directories = [directory] + # Create directories in the volume for shared memory maps + self._createSharedMemoryDirectories(self.created_directories) + # Override the AppSetting for the duration of this test so the + # FileAccessorUnix can use these directories for creating memory maps + os.environ.update( + {UNIX_SHARED_MEMORY_DIRECTORIES: ','.join(self.created_directories)} + ) - def _tearDownDarwin(self): - # Delete the directories containing shared memory maps - self._deleteSharedMemoryDirectories(self.created_directories) - # Unmount the volume used for shared memory maps - volume_name = 'shm' - cmd = f"find /Volumes -type d -name '{volume_name}*' -print0 " \ - "| xargs -0 umount -f" - result = subprocess.run(cmd, stdout=subprocess.PIPE, shell=True) - if result.returncode != 0: - raise IOError(f'Cannot delete volume with command: {cmd} - ' - f'{result.stdout} - {result.stderr}') + def _tearDownDarwin(self): + # Delete the directories containing shared memory maps + self._deleteSharedMemoryDirectories(self.created_directories) + # Unmount the volume used for shared memory maps + volume_name = 'shm' + cmd = f"find /Volumes -type d -name '{volume_name}*' -print0 " \ + "| xargs -0 umount -f" + result = subprocess.run(cmd, stdout=subprocess.PIPE, shell=True) + if result.returncode != 0: + raise IOError(f'Cannot delete volume with command: {cmd} - ' + f'{result.stdout} - {result.stderr}') class _MockWebHostServicer(protos.FunctionRpcServicer):