diff --git a/.azuredevops/dependabot.yml b/.azuredevops/dependabot.yml
new file mode 100644
index 000000000..5f6217a91
--- /dev/null
+++ b/.azuredevops/dependabot.yml
@@ -0,0 +1,4 @@
+# Mirrored repository. We use dependabot via GitHub, not Azure DevOps.
+version: 2
+enable-security-updates: false
+enable-campaigned-updates: false
diff --git a/eng/ci/official-build.yml b/eng/ci/official-build.yml
index 40095352a..e4d22dde7 100644
--- a/eng/ci/official-build.yml
+++ b/eng/ci/official-build.yml
@@ -77,7 +77,7 @@ extends:
dependsOn: BuildPythonWorker
jobs:
- template: /eng/templates/official/jobs/ci-docker-dedicated-tests.yml@self
- - stage: RunWorkerLinuxConsumptionTests
+ - stage: RunWorkerFlexConsumptionTests
dependsOn: BuildPythonWorker
jobs:
- template: /eng/templates/official/jobs/ci-lc-tests.yml@self
diff --git a/eng/ci/public-build.yml b/eng/ci/public-build.yml
index 69ae8576f..17d8000c3 100644
--- a/eng/ci/public-build.yml
+++ b/eng/ci/public-build.yml
@@ -60,6 +60,12 @@ extends:
PROJECT_DIRECTORY: 'workers'
# Skip the build stage for SDK and Extensions release branches. This stage will fail because pyproject.toml contains the updated (and unreleased) library version
condition: and(eq(variables.isSdkRelease, false), eq(variables.isExtensionsRelease, false), eq(variables['USETESTPYTHONSDK'], false), eq(variables['USETESTPYTHONEXTENSIONS'], false))
+ - stage: CheckPythonWorkerDependencies
+ dependsOn: BuildPythonWorker
+ jobs:
+ - template: /eng/templates/jobs/ci-dependency-check.yml@self
+ parameters:
+ PoolName: 1es-pool-azfunc-public
- stage: RunWorkerUnitTests
dependsOn: BuildPythonWorker
jobs:
diff --git a/eng/pack/Microsoft.Azure.Functions.V4.PythonWorker.nuspec b/eng/pack/Microsoft.Azure.Functions.V4.PythonWorker.nuspec
index 8adbb7c6d..6531c5dfb 100644
--- a/eng/pack/Microsoft.Azure.Functions.V4.PythonWorker.nuspec
+++ b/eng/pack/Microsoft.Azure.Functions.V4.PythonWorker.nuspec
@@ -10,14 +10,6 @@
© .NET Foundation. All rights reserved.
-
-
-
-
-
-
-
-
diff --git a/eng/pack/templates/macos_64_env_gen.yml b/eng/pack/templates/macos_64_env_gen.yml
index af1a9f699..212c56fc7 100644
--- a/eng/pack/templates/macos_64_env_gen.yml
+++ b/eng/pack/templates/macos_64_env_gen.yml
@@ -70,7 +70,6 @@ steps:
cd workers
pip-audit -r requirements.txt
displayName: 'Run vulnerability scan'
- condition: ne(variables['pythonVersion'], '3.7')
- task: CopyFiles@2
inputs:
contents: '$(workerPath)'
@@ -106,6 +105,7 @@ steps:
$expected = @(
".artifactignore",
"azure",
+ "azurefunctions",
"azure_functions_worker",
"azure_functions_worker/protos/FunctionRpc_pb2.py",
"azure_functions_worker/protos/FunctionRpc_pb2_grpc.py",
@@ -124,13 +124,6 @@ steps:
"werkzeug",
"worker.py"
)
- # Add azurefunctions/ conditionally
- if ("$(minorVersion)" -ne "7") {
- Write-Host "Including azurefunctions/ in expected list (minorVersion=$(minorVersion))"
- $expected += "azurefunctions"
- } else {
- Write-Host "Skipping azurefunctions/ (minorVersion=7)"
- }
$missing = @()
diff --git a/eng/pack/templates/nix_arm64_env_gen.yml b/eng/pack/templates/nix_arm64_env_gen.yml
index a6a92b00f..844fbac1b 100644
--- a/eng/pack/templates/nix_arm64_env_gen.yml
+++ b/eng/pack/templates/nix_arm64_env_gen.yml
@@ -76,7 +76,6 @@ steps:
cd workers
pip-audit -r requirements.txt
displayName: 'Run vulnerability scan'
- condition: ne(variables['pythonVersion'], '3.7')
- task: CopyFiles@2
inputs:
contents: '$(workerPath)'
@@ -112,6 +111,7 @@ steps:
$expected = @(
".artifactignore",
"azure",
+ "azurefunctions",
"azure_functions_worker",
"azure_functions_worker/protos/FunctionRpc_pb2.py",
"azure_functions_worker/protos/FunctionRpc_pb2_grpc.py",
@@ -130,13 +130,6 @@ steps:
"werkzeug",
"worker.py"
)
- # Add azurefunctions/ conditionally
- if ("$(minorVersion)" -ne "7") {
- Write-Host "Including azurefunctions/ in expected list (minorVersion=$(minorVersion))"
- $expected += "azurefunctions"
- } else {
- Write-Host "Skipping azurefunctions/ (minorVersion=7)"
- }
$missing = @()
diff --git a/eng/pack/templates/nix_env_gen.yml b/eng/pack/templates/nix_env_gen.yml
index cb78c8c5d..643ed544e 100644
--- a/eng/pack/templates/nix_env_gen.yml
+++ b/eng/pack/templates/nix_env_gen.yml
@@ -70,7 +70,6 @@ steps:
cd workers
pip-audit -r requirements.txt
displayName: 'Run vulnerability scan'
- condition: ne(variables['pythonVersion'], '3.7')
- task: CopyFiles@2
inputs:
contents: '$(workerPath)'
@@ -107,6 +106,7 @@ steps:
".artifactignore",
"azure",
+          "azurefunctions",
          "azure_functions_worker",
"azure_functions_worker/protos/FunctionRpc_pb2.py",
"azure_functions_worker/protos/FunctionRpc_pb2_grpc.py",
"azure_functions_worker/protos/_src",
@@ -124,13 +124,6 @@ steps:
"werkzeug",
"worker.py"
)
- # Add azurefunctions/ conditionally
- if ("$(minorVersion)" -ne "7") {
- Write-Host "Including azurefunctions/ in expected list (minorVersion=$(minorVersion))"
- $expected += "azurefunctions"
- } else {
- Write-Host "Skipping azurefunctions/ (minorVersion=7)"
- }
$missing = @()
diff --git a/eng/pack/templates/win_env_gen.yml b/eng/pack/templates/win_env_gen.yml
index d32cef19a..43c950b18 100644
--- a/eng/pack/templates/win_env_gen.yml
+++ b/eng/pack/templates/win_env_gen.yml
@@ -69,7 +69,6 @@ steps:
cd workers
pip-audit -r requirements.txt
displayName: 'Run vulnerability scan'
- condition: ne(variables['pythonVersion'], '3.7')
- task: CopyFiles@2
inputs:
contents: '$(workerPath)'
@@ -105,6 +104,7 @@ steps:
$expected = @(
".artifactignore",
"azure",
+ "azurefunctions",
"azure_functions_worker",
"azure_functions_worker/protos/FunctionRpc_pb2.py",
"azure_functions_worker/protos/FunctionRpc_pb2_grpc.py",
@@ -123,13 +123,6 @@ steps:
"werkzeug",
"worker.py"
)
- # Add azurefunctions/ conditionally
- if ("$(minorVersion)" -ne "7") {
- Write-Host "Including azurefunctions/ in expected list (minorVersion=$(minorVersion))"
- $expected += "azurefunctions"
- } else {
- Write-Host "Skipping azurefunctions/ (minorVersion=7)"
- }
$missing = @()
diff --git a/eng/scripts/install-dependencies.sh b/eng/scripts/install-dependencies.sh
index 3f4a16961..bb180201e 100644
--- a/eng/scripts/install-dependencies.sh
+++ b/eng/scripts/install-dependencies.sh
@@ -6,13 +6,10 @@ python -m pip install -e runtimes/v1
python -m pip install -U azure-functions --pre
python -m pip install -U -e $2/[dev]
-if [[ $1 != "3.7" ]]; then
- python -m pip install --pre -U -e $2/[test-http-v2]
-fi
-if [[ $1 != "3.7" && $1 != "3.8" ]]; then
- python -m pip install --pre -U -e $2/[test-deferred-bindings]
+python -m pip install --pre -U -e $2/[test-http-v2]
+python -m pip install --pre -U -e $2/[test-deferred-bindings]
+
+SERVICEBUS_DIR="./servicebus_dir"
+python -m pip install --pre -U --target "$SERVICEBUS_DIR" azurefunctions-extensions-bindings-servicebus==1.0.0b2
+python -c "import sys; sys.path.insert(0, '$SERVICEBUS_DIR'); import azurefunctions.extensions.bindings.servicebus as sb; print('servicebus version:', sb.__version__)"
- SERVICEBUS_DIR="./servicebus_dir"
- python -m pip install --pre -U --target "$SERVICEBUS_DIR" azurefunctions-extensions-bindings-servicebus==1.0.0b2
- python -c "import sys; sys.path.insert(0, '$SERVICEBUS_DIR'); import azurefunctions.extensions.bindings.servicebus as sb; print('servicebus version:', sb.__version__)"
-fi
diff --git a/eng/scripts/test-extensions.sh b/eng/scripts/test-extensions.sh
index 43fb9736f..dbfb1b7cc 100644
--- a/eng/scripts/test-extensions.sh
+++ b/eng/scripts/test-extensions.sh
@@ -1,13 +1,9 @@
#!/bin/bash
python -m pip install --upgrade pip
-if [[ $2 != "3.7" ]]; then
- python -m pip install -e $1/PythonExtensionArtifact/$3
- python -m pip install --pre -e workers/[test-http-v2]
-fi
-if [[ $2 != "3.7" && $2 != "3.8" ]]; then
- python -m pip install -e $1/PythonExtensionArtifact/$3
- python -m pip install --pre -U -e workers/[test-deferred-bindings]
-fi
+
+python -m pip install -e $1/PythonExtensionArtifact/$3
+python -m pip install --pre -e workers/[test-http-v2]
+python -m pip install --pre -U -e workers/[test-deferred-bindings]
python -m pip install -U -e workers/[dev]
\ No newline at end of file
diff --git a/eng/scripts/test-sdk.sh b/eng/scripts/test-sdk.sh
index 7c1fbb728..8ef793311 100644
--- a/eng/scripts/test-sdk.sh
+++ b/eng/scripts/test-sdk.sh
@@ -4,9 +4,5 @@ python -m pip install --upgrade pip
python -m pip install -e $1/PythonSdkArtifact
python -m pip install -e workers/[dev]
-if [[ $2 != "3.7" ]]; then
- python -m pip install --pre -U -e workers/[test-http-v2]
-fi
-if [[ $2 != "3.7" && $2 != "3.8" ]]; then
- python -m pip install --pre -U -e workers/[test-deferred-bindings]
-fi
\ No newline at end of file
+python -m pip install --pre -U -e workers/[test-http-v2]
+python -m pip install --pre -U -e workers/[test-deferred-bindings]
diff --git a/eng/templates/jobs/ci-dependency-check.yml b/eng/templates/jobs/ci-dependency-check.yml
new file mode 100644
index 000000000..3ce00f954
--- /dev/null
+++ b/eng/templates/jobs/ci-dependency-check.yml
@@ -0,0 +1,81 @@
+jobs:
+ - job: "TestPython"
+ displayName: "Run Dependency Checks"
+
+ pool:
+ name: ${{ parameters.PoolName }}
+ image: 1es-ubuntu-22.04
+ os: linux
+
+ strategy:
+ matrix:
+ Python39:
+ PYTHON_VERSION: '3.9'
+ Python310:
+ PYTHON_VERSION: '3.10'
+ Python311:
+ PYTHON_VERSION: '3.11'
+ Python312:
+ PYTHON_VERSION: '3.12'
+ Python313:
+ PYTHON_VERSION: '3.13'
+ Python314:
+ PYTHON_VERSION: '3.14'
+ steps:
+ - task: UsePythonVersion@0
+ inputs:
+ versionSpec: $(PYTHON_VERSION)
+ - powershell: |
+ $PY_VER = "$(PYTHON_VERSION)"
+ Write-Host "Python version: $PY_VER"
+
+ # Extract minor version as integers
+ $versionParts = $PY_VER.Split('.')
+ $PY_MINOR = [int]$versionParts[1]
+ Write-Host "Minor version: $PY_MINOR"
+ Write-Host "##vso[task.setvariable variable=minorVersion;]$PY_MINOR"
+
+ # Set build-related variables based on Python minor version
+ if( $PY_MINOR -ge 13 )
+ {
+ Write-Host "##vso[task.setvariable variable=proxyWorker;]true"
+ }
+ else
+ {
+ Write-Host "##vso[task.setvariable variable=proxyWorker;]false"
+ }
+ displayName: 'Set necessary variables'
+ - bash: |
+ echo "Checking azure_functions_worker (Python < 3.13)..."
+ cd workers
+ pip install . invoke
+ cd tests
+ python -m invoke -c test_setup build-protos
+ cd ..
+ python -c "import pkgutil, importlib; [importlib.import_module(f'azure_functions_worker.{name}') for _, name, _ in pkgutil.walk_packages(['azure_functions_worker'])]"
+ displayName: 'Python Azure Functions Worker: check for missing dependencies'
+ condition: eq(variables['proxyWorker'], false)
+ - bash: |
+ echo "Checking proxy_worker (Python >= 3.13)..."
+ cd workers
+ pip install . invoke
+ cd tests
+ python -m invoke -c test_setup build-protos
+ cd ..
+ python -c "import pkgutil, importlib; [importlib.import_module(f'proxy_worker.{name}') for _, name, _ in pkgutil.walk_packages(['proxy_worker'])]"
+ displayName: 'Python Proxy Worker: check for missing dependencies'
+ condition: eq(variables['proxyWorker'], true)
+ - bash: |
+ echo "Checking V1 Library Worker (Python >= 3.13)..."
+ cd runtimes/v1
+ pip install .
+ python -c "import pkgutil, importlib; [importlib.import_module(f'azure_functions_runtime_v1.{name}') for _, name, _ in pkgutil.walk_packages(['azure_functions_runtime_v1'])]"
+ displayName: 'Python Library V1: check for missing dependencies'
+ condition: eq(variables['proxyWorker'], true)
+ - bash: |
+ echo "Checking V2 Library Worker (Python >= 3.13)..."
+ cd runtimes/v2
+ pip install .
+ python -c "import pkgutil, importlib; [importlib.import_module(f'azure_functions_runtime.{name}') for _, name, _ in pkgutil.walk_packages(['azure_functions_runtime'])]"
+ displayName: 'Python Library V2: check for missing dependencies'
+ condition: eq(variables['proxyWorker'], true)
diff --git a/eng/templates/jobs/ci-emulator-tests.yml b/eng/templates/jobs/ci-emulator-tests.yml
index 0df9b005c..88ef9f067 100644
--- a/eng/templates/jobs/ci-emulator-tests.yml
+++ b/eng/templates/jobs/ci-emulator-tests.yml
@@ -22,6 +22,8 @@ jobs:
PYTHON_VERSION: '3.12'
Python313:
PYTHON_VERSION: '3.13'
+ Python314:
+ PYTHON_VERSION: '3.14'
steps:
- task: UsePythonVersion@0
inputs:
diff --git a/eng/templates/jobs/ci-library-unit-tests.yml b/eng/templates/jobs/ci-library-unit-tests.yml
index 94a089f1b..f82cf5850 100644
--- a/eng/templates/jobs/ci-library-unit-tests.yml
+++ b/eng/templates/jobs/ci-library-unit-tests.yml
@@ -32,5 +32,5 @@ jobs:
python -m pytest -q --dist loadfile --reruns 4 --instafail --cov=./${{ parameters.PROJECT_DIRECTORY }} --cov-report xml --cov-branch tests/unittests
displayName: "Running $(PYTHON_VERSION) Unit Tests"
env:
- AzureWebJobsStorage: $(LinuxStorageConnectionString312)
+ AZURE_STORAGE_CONNECTION_STRING: $(AZURE_STORAGE_CONNECTION_STRING)
workingDirectory: $(Build.SourcesDirectory)/${{ parameters.PROJECT_DIRECTORY }}
\ No newline at end of file
diff --git a/eng/templates/jobs/ci-unit-tests.yml b/eng/templates/jobs/ci-unit-tests.yml
index 6e70b8331..5eadfb321 100644
--- a/eng/templates/jobs/ci-unit-tests.yml
+++ b/eng/templates/jobs/ci-unit-tests.yml
@@ -22,6 +22,8 @@ jobs:
PYTHON_VERSION: '3.12'
Python313:
PYTHON_VERSION: '3.13'
+ Python314:
+ PYTHON_VERSION: '3.14'
steps:
- task: UsePythonVersion@0
inputs:
diff --git a/eng/templates/official/jobs/build-artifacts.yml b/eng/templates/official/jobs/build-artifacts.yml
index 900ab0ecf..f6e469240 100644
--- a/eng/templates/official/jobs/build-artifacts.yml
+++ b/eng/templates/official/jobs/build-artifacts.yml
@@ -7,12 +7,6 @@ jobs:
os: windows
strategy:
matrix:
- Python37V4:
- pythonVersion: '3.7'
- normalizedPythonVersion: '3.7'
- Python38V4:
- pythonVersion: '3.8'
- normalizedPythonVersion: '3.8'
Python39V4:
pythonVersion: '3.9'
normalizedPythonVersion: '3.9'
@@ -29,7 +23,7 @@ jobs:
pythonVersion: '3.13'
normalizedPythonVersion: '3.13'
Python314V4:
- pythonVersion: '3.14.0-rc.2'
+ pythonVersion: '3.14'
normalizedPythonVersion: '3.14'
templateContext:
outputParentDirectory: $(Build.ArtifactStagingDirectory)
@@ -52,12 +46,6 @@ jobs:
os: windows
strategy:
matrix:
- Python37V4:
- pythonVersion: '3.7'
- normalizedPythonVersion: '3.7'
- Python38V4:
- pythonVersion: '3.8'
- normalizedPythonVersion: '3.8'
Python39V4:
pythonVersion: '3.9'
normalizedPythonVersion: '3.9'
@@ -74,7 +62,7 @@ jobs:
pythonVersion: '3.13'
normalizedPythonVersion: '3.13'
Python314V4:
- pythonVersion: '3.14.0-rc.2'
+ pythonVersion: '3.14'
normalizedPythonVersion: '3.14'
templateContext:
outputParentDirectory: $(Build.ArtifactStagingDirectory)
@@ -97,12 +85,6 @@ jobs:
os: linux
strategy:
matrix:
- Python37V4:
- pythonVersion: '3.7'
- normalizedPythonVersion: '3.7'
- Python38V4:
- pythonVersion: '3.8'
- normalizedPythonVersion: '3.8'
Python39V4:
pythonVersion: '3.9'
normalizedPythonVersion: '3.9'
@@ -119,7 +101,7 @@ jobs:
pythonVersion: '3.13'
normalizedPythonVersion: '3.13'
Python314V4:
- pythonVersion: '3.14.0-rc.2'
+ pythonVersion: '3.14'
normalizedPythonVersion: '3.14'
templateContext:
outputParentDirectory: $(Build.ArtifactStagingDirectory)
@@ -141,12 +123,6 @@ jobs:
os: macOS
strategy:
matrix:
- Python37V4:
- pythonVersion: '3.7'
- normalizedPythonVersion: '3.7'
- Python38V4:
- pythonVersion: '3.8'
- normalizedPythonVersion: '3.8'
Python39V4:
pythonVersion: '3.9'
normalizedPythonVersion: '3.9'
@@ -163,7 +139,7 @@ jobs:
pythonVersion: '3.13'
normalizedPythonVersion: '3.13'
Python314V4:
- pythonVersion: '3.14.0-rc.2'
+ pythonVersion: '3.14'
normalizedPythonVersion: '3.14'
templateContext:
outputParentDirectory: $(Build.ArtifactStagingDirectory)
@@ -201,7 +177,7 @@ jobs:
pythonVersion: '3.13'
normalizedPythonVersion: '3.13'
Python314V4:
- pythonVersion: '3.14.0-rc.2'
+ pythonVersion: '3.14'
normalizedPythonVersion: '3.14'
templateContext:
outputParentDirectory: $(Build.ArtifactStagingDirectory)
@@ -239,7 +215,7 @@ jobs:
pythonVersion: '3.13'
normalizedPythonVersion: '3.13'
Python314V4:
- pythonVersion: '3.14.0-rc.2'
+ pythonVersion: '3.14'
normalizedPythonVersion: '3.14'
templateContext:
outputParentDirectory: $(Build.ArtifactStagingDirectory)
diff --git a/eng/templates/official/jobs/ci-docker-consumption-tests.yml b/eng/templates/official/jobs/ci-docker-consumption-tests.yml
index e5653455b..c4af5c014 100644
--- a/eng/templates/official/jobs/ci-docker-consumption-tests.yml
+++ b/eng/templates/official/jobs/ci-docker-consumption-tests.yml
@@ -15,30 +15,27 @@ jobs:
Python39:
PYTHON_VERSION: '3.9'
STORAGE_CONNECTION: $(LinuxStorageConnectionString39)
- COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString39)
- EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString39)
- SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString39)
SQL_CONNECTION: $(LinuxSqlConnectionString39)
EVENTGRID_URI: $(LinuxEventGridTopicUriString39)
EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString39)
Python310:
PYTHON_VERSION: '3.10'
STORAGE_CONNECTION: $(LinuxStorageConnectionString310)
- COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString310)
- EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString310)
- SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString310)
SQL_CONNECTION: $(LinuxSqlConnectionString310)
EVENTGRID_URI: $(LinuxEventGridTopicUriString310)
EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString310)
Python311:
PYTHON_VERSION: '3.11'
STORAGE_CONNECTION: $(LinuxStorageConnectionString311)
- COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString311)
- EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString311)
- SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString311)
SQL_CONNECTION: $(LinuxSqlConnectionString311)
EVENTGRID_URI: $(LinuxEventGridTopicUriString311)
EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString311)
+ Python312:
+ PYTHON_VERSION: '3.12'
+ STORAGE_CONNECTION: $(LinuxStorageConnectionString312)
+ SQL_CONNECTION: $(LinuxSqlConnectionString312)
+ EVENTGRID_URI: $(LinuxEventGridTopicUriString312)
+ EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString312)
steps:
- task: UsePythonVersion@0
@@ -56,9 +53,7 @@ jobs:
env:
CONSUMPTION_DOCKER_TEST: "true"
AzureWebJobsStorage: $(STORAGE_CONNECTION)
- AzureWebJobsCosmosDBConnectionString: $(COSMOSDB_CONNECTION)
- AzureWebJobsEventHubConnectionString: $(EVENTHUB_CONNECTION)
- AzureWebJobsServiceBusConnectionString: $(SERVICEBUS_CONNECTION)
+ STORAGE_CONNECTION: $(STORAGE_CONNECTION)
AzureWebJobsSqlConnectionString: $(SQL_CONNECTION)
AzureWebJobsEventGridTopicUri: $(EVENTGRID_URI)
AzureWebJobsEventGridConnectionKey: $(EVENTGRID_CONNECTION)
diff --git a/eng/templates/official/jobs/ci-docker-dedicated-tests.yml b/eng/templates/official/jobs/ci-docker-dedicated-tests.yml
index 728caa42b..dff9f5d1f 100644
--- a/eng/templates/official/jobs/ci-docker-dedicated-tests.yml
+++ b/eng/templates/official/jobs/ci-docker-dedicated-tests.yml
@@ -15,30 +15,27 @@ jobs:
Python39:
PYTHON_VERSION: '3.9'
STORAGE_CONNECTION: $(LinuxStorageConnectionString39)
- COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString39)
- EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString39)
- SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString39)
SQL_CONNECTION: $(LinuxSqlConnectionString39)
EVENTGRID_URI: $(LinuxEventGridTopicUriString39)
EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString39)
Python310:
PYTHON_VERSION: '3.10'
STORAGE_CONNECTION: $(LinuxStorageConnectionString310)
- COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString310)
- EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString310)
- SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString310)
SQL_CONNECTION: $(LinuxSqlConnectionString310)
EVENTGRID_URI: $(LinuxEventGridTopicUriString310)
EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString310)
Python311:
PYTHON_VERSION: '3.11'
STORAGE_CONNECTION: $(LinuxStorageConnectionString311)
- COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString311)
- EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString311)
- SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString311)
SQL_CONNECTION: $(LinuxSqlConnectionString311)
EVENTGRID_URI: $(LinuxEventGridTopicUriString311)
EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString311)
+ Python312:
+ PYTHON_VERSION: '3.12'
+ STORAGE_CONNECTION: $(LinuxStorageConnectionString312)
+ SQL_CONNECTION: $(LinuxSqlConnectionString312)
+ EVENTGRID_URI: $(LinuxEventGridTopicUriString312)
+ EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString312)
steps:
- task: UsePythonVersion@0
@@ -56,9 +53,7 @@ jobs:
env:
DEDICATED_DOCKER_TEST: "true"
AzureWebJobsStorage: $(STORAGE_CONNECTION)
- AzureWebJobsCosmosDBConnectionString: $(COSMOSDB_CONNECTION)
- AzureWebJobsEventHubConnectionString: $(EVENTHUB_CONNECTION)
- AzureWebJobsServiceBusConnectionString: $(SERVICEBUS_CONNECTION)
+ STORAGE_CONNECTION: $(STORAGE_CONNECTION)
AzureWebJobsSqlConnectionString: $(SQL_CONNECTION)
AzureWebJobsEventGridTopicUri: $(EVENTGRID_URI)
AzureWebJobsEventGridConnectionKey: $(EVENTGRID_CONNECTION)
diff --git a/eng/templates/official/jobs/ci-e2e-tests.yml b/eng/templates/official/jobs/ci-e2e-tests.yml
index 12b574170..ab1860f45 100644
--- a/eng/templates/official/jobs/ci-e2e-tests.yml
+++ b/eng/templates/official/jobs/ci-e2e-tests.yml
@@ -15,45 +15,36 @@ jobs:
Python39:
PYTHON_VERSION: '3.9'
STORAGE_CONNECTION: $(LinuxStorageConnectionString39)
- COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString39)
- EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString39)
- SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString39)
SQL_CONNECTION: $(LinuxSqlConnectionString39)
EVENTGRID_URI: $(LinuxEventGridTopicUriString39)
EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString39)
Python310:
PYTHON_VERSION: '3.10'
STORAGE_CONNECTION: $(LinuxStorageConnectionString310)
- COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString310)
- EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString310)
- SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString310)
SQL_CONNECTION: $(LinuxSqlConnectionString310)
EVENTGRID_URI: $(LinuxEventGridTopicUriString310)
EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString310)
Python311:
PYTHON_VERSION: '3.11'
STORAGE_CONNECTION: $(LinuxStorageConnectionString311)
- COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString311)
- EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString311)
- SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString311)
SQL_CONNECTION: $(LinuxSqlConnectionString311)
EVENTGRID_URI: $(LinuxEventGridTopicUriString311)
EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString311)
Python312:
PYTHON_VERSION: '3.12'
STORAGE_CONNECTION: $(LinuxStorageConnectionString312)
- COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString312)
- EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString312)
- SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString312)
SQL_CONNECTION: $(LinuxSqlConnectionString312)
EVENTGRID_URI: $(LinuxEventGridTopicUriString312)
EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString312)
Python313:
PYTHON_VERSION: '3.13'
STORAGE_CONNECTION: $(LinuxStorageConnectionString312)
- COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString312)
- EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString312)
- SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString312)
+ SQL_CONNECTION: $(LinuxSqlConnectionString312)
+ EVENTGRID_URI: $(LinuxEventGridTopicUriString312)
+ EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString312)
+ Python314:
+ PYTHON_VERSION: '3.14'
+ STORAGE_CONNECTION: $(LinuxStorageConnectionString312)
SQL_CONNECTION: $(LinuxSqlConnectionString312)
EVENTGRID_URI: $(LinuxEventGridTopicUriString312)
EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString312)
@@ -138,9 +129,7 @@ jobs:
python -m pytest -q --dist loadfile --reruns 4 --cov=./azure_functions_worker --cov-report xml --cov-branch --cov-append tests/endtoend
env:
AzureWebJobsStorage: $(STORAGE_CONNECTION)
- AzureWebJobsCosmosDBConnectionString: $(COSMOSDB_CONNECTION)
- AzureWebJobsEventHubConnectionString: $(EVENTHUB_CONNECTION)
- AzureWebJobsServiceBusConnectionString: $(SERVICEBUS_CONNECTION)
+ STORAGE_CONNECTION: $(STORAGE_CONNECTION)
AzureWebJobsSqlConnectionString: $(SQL_CONNECTION)
AzureWebJobsEventGridTopicUri: $(EVENTGRID_URI)
AzureWebJobsEventGridConnectionKey: $(EVENTGRID_CONNECTION)
diff --git a/eng/templates/official/jobs/ci-lc-tests.yml b/eng/templates/official/jobs/ci-lc-tests.yml
index eed1db324..8d0d695ec 100644
--- a/eng/templates/official/jobs/ci-lc-tests.yml
+++ b/eng/templates/official/jobs/ci-lc-tests.yml
@@ -3,42 +3,29 @@ parameters:
jobs:
- job: "TestPython"
- displayName: "Run Python Linux Consumption Tests"
+ displayName: "Run Python Flex Consumption Tests"
pool:
name: 1es-pool-azfunc
image: 1es-ubuntu-22.04
os: linux
-
+
strategy:
matrix:
- Python39:
- PYTHON_VERSION: '3.9'
Python310:
PYTHON_VERSION: '3.10'
Python311:
PYTHON_VERSION: '3.11'
Python312:
PYTHON_VERSION: '3.12'
+ Python313:
+ PYTHON_VERSION: '3.13'
+ Python314:
+ PYTHON_VERSION: '3.14'
steps:
- task: UsePythonVersion@0
inputs:
versionSpec: $(PYTHON_VERSION)
-
- - bash: |
- # Start Azurite storage emulator in the background
- docker run -d -p 10000:10000 -p 10001:10001 -p 10002:10002 \
- --name azurite-storage \
- mcr.microsoft.com/azure-storage/azurite:latest \
- azurite --blobHost 0.0.0.0 --queueHost 0.0.0.0 --tableHost 0.0.0.0
-
- # Wait for Azurite to be ready
- sleep 5
-
- # Verify Azurite is running
- docker ps | grep azurite-storage
- displayName: 'Start Azurite Storage Emulator'
- condition: and(eq(variables.isSdkRelease, false), eq(variables.isExtensionsRelease, false), eq(variables['USETESTPYTHONSDK'], false), eq(variables['USETESTPYTHONEXTENSIONS'], false))
- bash: |
python -m pip install --upgrade pip
@@ -49,71 +36,12 @@ jobs:
displayName: 'Install dependencies and the worker'
# Skip the installation stage for SDK and Extensions release branches. This stage will fail because pyproject.toml contains the updated (and unreleased) library version
condition: and(eq(variables.isSdkRelease, false), eq(variables.isExtensionsRelease, false), eq(variables['USETESTPYTHONSDK'], false), eq(variables['USETESTPYTHONEXTENSIONS'], false))
-
- - bash: |
- # Install Azure CLI (if not already present)
- curl -sL https://aka.ms/InstallAzureCLIDeb | sudo bash
-
- # Create the apps container in Azurite
- az storage container create \
- --name apps \
- --connection-string "$(AZURE_STORAGE_CONNECTION_STRING)"
-
- # Upload all function app packages to the container
- FUNCTION_APPS_DIR="$(Build.SourcesDirectory)/${{ parameters.PROJECT_DIRECTORY }}/tests/consumption_tests/function_app_zips"
- for zipfile in "$FUNCTION_APPS_DIR"/*.zip; do
- filename=$(basename "$zipfile")
- echo "Uploading $filename..."
- az storage blob upload \
- --container-name apps \
- --name "$filename" \
- --file "$zipfile" \
- --connection-string "$(AZURE_STORAGE_CONNECTION_STRING)" \
- --overwrite
- done
-
- # Generate a container-level SAS token valid for 1 day
- SAS_TOKEN=$(az storage container generate-sas \
- --name apps \
- --permissions r \
- --expiry $(date -u -d '+1 day' +%Y-%m-%dT%H:%M:%SZ) \
- --connection-string "$(AZURE_STORAGE_CONNECTION_STRING)" \
- --output tsv)
-
- echo "##vso[task.setvariable variable=CONTAINER_SAS_TOKEN]$SAS_TOKEN"
-
- # List blobs in the container to verify uploads
- echo "Verifying uploaded blobs in 'apps' container..."
- az storage blob list \
- --container-name apps \
- --connection-string "$(AZURE_STORAGE_CONNECTION_STRING)" \
- --output table
-
- env:
- AZURE_STORAGE_CONNECTION_STRING: $(AZURE_STORAGE_CONNECTION_STRING)
-
- displayName: 'Setup Function App Packages in Azurite'
- condition: and(eq(variables.isSdkRelease, false), eq(variables.isExtensionsRelease, false), eq(variables['USETESTPYTHONSDK'], false), eq(variables['USETESTPYTHONEXTENSIONS'], false))
-
- - powershell: |
- Write-Host "CONTAINER_SAS_TOKEN: $(CONTAINER_SAS_TOKEN)"
- displayName: 'Display CONTAINER_SAS_TOKEN variable'
- # Skip as this variable is defined only for non-release branches
- condition: and(eq(variables.isSdkRelease, false), eq(variables.isExtensionsRelease, false), eq(variables['USETESTPYTHONSDK'], false), eq(variables['USETESTPYTHONEXTENSIONS'], false))
- bash: |
python -m pytest --dist loadfile -vv --instafail tests/consumption_tests
env:
AzureWebJobsStorage: $(AZURE_STORAGE_CONNECTION_STRING)
_DUMMY_CONT_KEY: $(_DUMMY_CONT_KEY)
- CONTAINER_SAS_TOKEN: $(CONTAINER_SAS_TOKEN)
- displayName: "Running $(PYTHON_VERSION) Linux Consumption tests"
+ displayName: "Running $(PYTHON_VERSION) Flex Consumption tests"
workingDirectory: $(Build.SourcesDirectory)/${{ parameters.PROJECT_DIRECTORY }}
condition: and(eq(variables.isSdkRelease, false), eq(variables.isExtensionsRelease, false), eq(variables['USETESTPYTHONSDK'], false), eq(variables['USETESTPYTHONEXTENSIONS'], false))
-
- - bash: |
- # Cleanup: Stop and remove Azurite container
- docker stop azurite-storage || true
- docker rm azurite-storage || true
- displayName: 'Cleanup Azurite Storage Emulator'
- condition: always()
\ No newline at end of file
diff --git a/eng/templates/official/jobs/publish-library-release.yml b/eng/templates/official/jobs/publish-library-release.yml
index 8a49433f6..301edd43a 100644
--- a/eng/templates/official/jobs/publish-library-release.yml
+++ b/eng/templates/official/jobs/publish-library-release.yml
@@ -57,9 +57,9 @@ jobs:
dependsOn: ['CheckGitHubRelease']
displayName: 'PyPI Package'
steps:
- - script: |
- echo "##vso[task.setvariable variable=BranchName]refs/heads/${{ parameters.BRANCH_NAME }}/$(NewLibraryVersion)"
- displayName: 'Set branch variable'
+ - powershell: |
+ Write-Host "##vso[task.setvariable variable=BranchName]refs/heads/${{ parameters.BRANCH_NAME }}/$(NewWorkerVersion)"
+ displayName: "Set branch variable"
- powershell: |
Write-Host "BranchName: $(BranchName)"
displayName: 'Display BranchName variable'
@@ -74,13 +74,13 @@ jobs:
branchName: '$(BranchName)'
allowPartiallySucceededBuilds: true
allowFailedBuilds: true
- targetPath: '$(Pipeline.Workspace)/PythonWorkerArtifact'
+ targetPath: 'PythonRuntimeArtifact'
- task: UsePythonVersion@0
displayName: 'Use Python 3.13'
inputs:
versionSpec: 3.13
- powershell: |
- $newLibraryVersion = "$(NewLibraryVersion)"
+ $newLibraryVersion = "$(NewWorkerVersion)"
$pypiToken = "$(PypiToken)"
# Setup local Python environment
@@ -89,7 +89,7 @@ jobs:
pip install twine
# Publish artifacts to PyPi
- twine upload --repository-url https://upload.pypi.org/legacy/ --username "__token__" --password "$pypiToken" PythonRuntimeArtifact/${{ parameters.PROJECT_DIRECTORY }}/dist/*
+ twine upload --repository-url https://upload.pypi.org/legacy/ --username "__token__" --password "$pypiToken" PythonRuntimeArtifact/${{ parameters.PROJECT_NAME }}/${{ parameters.PROJECT_DIRECTORY }}/dist/*
Start-Sleep -Seconds 3
# Checking if the new version is uploaded
diff --git a/eng/templates/official/jobs/publish-release.yml b/eng/templates/official/jobs/publish-release.yml
index 57e5a78a1..2cba69900 100644
--- a/eng/templates/official/jobs/publish-release.yml
+++ b/eng/templates/official/jobs/publish-release.yml
@@ -81,7 +81,6 @@ jobs:
allowFailedBuilds: true
targetPath: '$(Pipeline.Workspace)/PythonWorkerArtifact'
-
- job: "CheckNugetPackageContent"
dependsOn: ['PublishNuget']
displayName: '(Manual) Check Nuget Package Content'
@@ -120,20 +119,16 @@ jobs:
# Modify Python Worker Version in eng\build\python.props
Write-Host "Replacing eng\build\python.props"
- ((Get-Content eng\build\Workers.Python.props) -replace "PythonWorker`" Version=`"(\d)+.(\d)+.(\d)+.?(\d)*`"","PythonWorker`" Version=`"$newWorkerVersion`"" -join "`n") +"`n" | Set-Content -NoNewline eng\build\Workers.Python.props
-
- # Modify Python Worker Version in test\WebJobs.Script.Tests\WebJobs.Script.Tests.csproj
- Write-Host "Replacing test\WebJobs.Script.Tests\WebJobs.Script.Tests.csproj"
- ((Get-Content test\WebJobs.Script.Tests\WebJobs.Script.Tests.csproj) -replace "PythonWorker`" Version=`"(\d)+.(\d)+.(\d)+.?(\d)*`"","PythonWorker`" Version=`"$newWorkerVersion`"" -join "`n") + "`n" | Set-Content -NoNewline test\WebJobs.Script.Tests\WebJobs.Script.Tests.csproj
+ ((Get-Content eng\build\Workers.Python.props) -replace "PythonWorker`" VersionOverride=`"(\d)+.(\d)+.(\d)+.?(\d)*`"","PythonWorker`" VersionOverride=`"$newWorkerVersion`"" -join "`n") +"`n" | Set-Content -NoNewline eng\build\Workers.Python.props
# Modify release_notes.md
Write-Host "Adding a new entry in release_notes.md"
- Add-Content -Path release_notes.md -Value "`n- Update Python Worker Version to [$newWorkerVersion](https://github.com/Azure/azure-functions-python-worker/releases/tag/$newWorkerVersion)"
+ Add-Content -Path release_notes.md -Value "- Update Python Worker Version to [$newWorkerVersion](https://github.com/Azure/azure-functions-python-worker/releases/tag/$newWorkerVersion)"
+
# Commit Python Version
Write-Host "Pushing $newBranch to host repo"
git add eng\build\Workers.Python.props
- git add test\WebJobs.Script.Tests\WebJobs.Script.Tests.csproj
git add release_notes.md
git commit -m "Update Python Worker Version to $newWorkerVersion"
git push origin $newBranch
diff --git a/eng/templates/shared/github-release-branch.yml b/eng/templates/shared/github-release-branch.yml
index c228c8c36..eb4967f8d 100644
--- a/eng/templates/shared/github-release-branch.yml
+++ b/eng/templates/shared/github-release-branch.yml
@@ -26,7 +26,7 @@ steps:
{
# Modify Runtime Version in workers/pyproject.toml
Write-Host "Replacing Runtime version in worker's pyproject.toml"
- ((Get-Content workers/pyproject.toml) -replace '"${{ parameters.PROJECT_NAME }}==[^";]+', "`"${{ parameters.PROJECT_NAME }}==$newLibraryVersion") -join "`n" | Set-Content -NoNewline workers/pyproject.toml
+ ((Get-Content workers/pyproject.toml) -replace '"${{ parameters.PROJECT_NAME }}==[^";]+', "`"${{ parameters.PROJECT_NAME }}==$newWorkerVersion") -join "`n" | Set-Content -NoNewline workers/pyproject.toml
# Change $versionFile version
Write-Host "Change version number in version.py to $newWorkerVersion"
diff --git a/runtimes/v1/azure_functions_runtime_v1/__init__.py b/runtimes/v1/azure_functions_runtime_v1/__init__.py
index eb8d5df6d..2e20d51d1 100644
--- a/runtimes/v1/azure_functions_runtime_v1/__init__.py
+++ b/runtimes/v1/azure_functions_runtime_v1/__init__.py
@@ -11,6 +11,7 @@
stop_threadpool_executor,
get_threadpool_executor,
)
+from .utils.executor import invocation_id_cv
__all__ = ('worker_init_request',
'functions_metadata_request',
@@ -19,4 +20,5 @@
'function_load_request',
'start_threadpool_executor',
'stop_threadpool_executor',
- 'get_threadpool_executor')
+ 'get_threadpool_executor',
+ 'invocation_id_cv')
diff --git a/runtimes/v1/azure_functions_runtime_v1/bindings/context.py b/runtimes/v1/azure_functions_runtime_v1/bindings/context.py
index 6181e630f..9f66568d2 100644
--- a/runtimes/v1/azure_functions_runtime_v1/bindings/context.py
+++ b/runtimes/v1/azure_functions_runtime_v1/bindings/context.py
@@ -9,6 +9,9 @@
from .tracecontext import TraceContext
+_invocation_id_local = threading.local()
+
+
class Context:
def __init__(self,
func_name: str,
@@ -66,4 +69,4 @@ def get_context(invoc_request, name: str,
return Context(
name, directory, invoc_request.invocation_id,
- threading.local(), trace_context, retry_context)
+ _invocation_id_local, trace_context, retry_context)
diff --git a/runtimes/v1/azure_functions_runtime_v1/handle_event.py b/runtimes/v1/azure_functions_runtime_v1/handle_event.py
index 4e877e3f8..da3b39e2f 100644
--- a/runtimes/v1/azure_functions_runtime_v1/handle_event.py
+++ b/runtimes/v1/azure_functions_runtime_v1/handle_event.py
@@ -5,6 +5,7 @@
import logging
import os
import sys
+import time
import typing
from .functions import FunctionInfo, Registry
@@ -275,6 +276,9 @@ async def function_environment_reload_request(request):
for var in env_vars:
os.environ[var] = env_vars[var]
+ # Refresh timezone information after environment reload
+ time.tzset()
+
if is_envvar_true(PYTHON_ENABLE_DEBUG_LOGGING):
root_logger = logging.getLogger()
root_logger.setLevel(logging.DEBUG)
diff --git a/runtimes/v1/azure_functions_runtime_v1/utils/executor.py b/runtimes/v1/azure_functions_runtime_v1/utils/executor.py
index 49e973b14..12dd04e40 100644
--- a/runtimes/v1/azure_functions_runtime_v1/utils/executor.py
+++ b/runtimes/v1/azure_functions_runtime_v1/utils/executor.py
@@ -2,6 +2,7 @@
# Licensed under the MIT License.
import asyncio
+import contextvars
import functools
from typing import Any
@@ -21,14 +22,19 @@ def execute_sync(function, args) -> Any:
return function(**args)
+invocation_id_cv = contextvars.ContextVar('invocation_id', default=None)
+
+
def run_sync_func(invocation_id, context, func, params):
# This helper exists because we need to access the current
# invocation_id from ThreadPoolExecutor's threads.
context.thread_local_storage.invocation_id = invocation_id
+ token = invocation_id_cv.set(invocation_id)
try:
if otel_manager.get_azure_monitor_available():
configure_opentelemetry(context)
result = functools.partial(execute_sync, func)
return result(params)
finally:
+ invocation_id_cv.reset(token)
context.thread_local_storage.invocation_id = None
diff --git a/runtimes/v2/azure_functions_runtime/__init__.py b/runtimes/v2/azure_functions_runtime/__init__.py
index a459b8957..4bedad94f 100644
--- a/runtimes/v2/azure_functions_runtime/__init__.py
+++ b/runtimes/v2/azure_functions_runtime/__init__.py
@@ -10,6 +10,7 @@
stop_threadpool_executor,
get_threadpool_executor,
)
+from .utils.executor import invocation_id_cv
__all__ = ('worker_init_request',
'functions_metadata_request',
@@ -18,4 +19,5 @@
'function_load_request',
'start_threadpool_executor',
'stop_threadpool_executor',
- 'get_threadpool_executor')
+ 'get_threadpool_executor',
+ 'invocation_id_cv')
diff --git a/runtimes/v2/azure_functions_runtime/bindings/context.py b/runtimes/v2/azure_functions_runtime/bindings/context.py
index eb5229ca4..83a342f53 100644
--- a/runtimes/v2/azure_functions_runtime/bindings/context.py
+++ b/runtimes/v2/azure_functions_runtime/bindings/context.py
@@ -6,6 +6,9 @@
from .tracecontext import TraceContext
+_invocation_id_local = threading.local()
+
+
class Context:
def __init__(self,
func_name: str,
@@ -63,4 +66,4 @@ def get_context(invoc_request, name: str,
return Context(
name, directory, invoc_request.invocation_id,
- threading.local(), trace_context, retry_context)
+ _invocation_id_local, trace_context, retry_context)
diff --git a/runtimes/v2/azure_functions_runtime/handle_event.py b/runtimes/v2/azure_functions_runtime/handle_event.py
index 3d6b8acfe..0ed133f93 100644
--- a/runtimes/v2/azure_functions_runtime/handle_event.py
+++ b/runtimes/v2/azure_functions_runtime/handle_event.py
@@ -4,6 +4,7 @@
import logging
import os
import sys
+import time
from typing import List, MutableMapping, Optional
@@ -306,7 +307,6 @@ async def function_environment_reload_request(request):
"Version %s", VERSION)
global _host, protos
try:
-
func_env_reload_request = \
request.request.function_environment_reload_request
directory = func_env_reload_request.function_app_directory
@@ -323,7 +323,8 @@ async def function_environment_reload_request(request):
for var in env_vars:
os.environ[var] = env_vars[var]
- # TODO: Apply PYTHON_THREADPOOL_THREAD_COUNT
+ # Refresh timezone information after environment reload
+ time.tzset()
if is_envvar_true(PYTHON_ENABLE_DEBUG_LOGGING):
root_logger = logging.getLogger()
diff --git a/runtimes/v2/azure_functions_runtime/utils/executor.py b/runtimes/v2/azure_functions_runtime/utils/executor.py
index 52330dcc8..718647ae6 100644
--- a/runtimes/v2/azure_functions_runtime/utils/executor.py
+++ b/runtimes/v2/azure_functions_runtime/utils/executor.py
@@ -1,6 +1,7 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
import asyncio
+import contextvars
import functools
from typing import Any
@@ -20,10 +21,14 @@ def execute_sync(function, args) -> Any:
return function(**args)
+invocation_id_cv = contextvars.ContextVar('invocation_id', default=None)
+
+
def run_sync_func(invocation_id, context, func, params):
# This helper exists because we need to access the current
# invocation_id from ThreadPoolExecutor's threads.
context.thread_local_storage.invocation_id = invocation_id
+ token = invocation_id_cv.set(invocation_id)
try:
if (otel_manager.get_azure_monitor_available()
or otel_manager.get_otel_libs_available()):
@@ -31,4 +36,5 @@ def run_sync_func(invocation_id, context, func, params):
result = functools.partial(execute_sync, func)
return result(params)
finally:
+ invocation_id_cv.reset(token)
context.thread_local_storage.invocation_id = None
diff --git a/runtimes/v2/azure_functions_runtime/version.py b/runtimes/v2/azure_functions_runtime/version.py
index c9cee5832..f21832533 100644
--- a/runtimes/v2/azure_functions_runtime/version.py
+++ b/runtimes/v2/azure_functions_runtime/version.py
@@ -1,4 +1,4 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
-VERSION = '1.1.0b3'
+VERSION = '1.1.0b4'
diff --git a/runtimes/v2/tests/unittests/test_deferred_bindings.py b/runtimes/v2/tests/unittests/test_deferred_bindings.py
index 1723cab71..66f40c3bf 100644
--- a/runtimes/v2/tests/unittests/test_deferred_bindings.py
+++ b/runtimes/v2/tests/unittests/test_deferred_bindings.py
@@ -1,7 +1,5 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
-import unittest
-
import azure.functions as func
import azurefunctions.extensions.base as clients
import tests.protos as protos
@@ -26,7 +24,6 @@ def setUp(self):
# Initialize DEFERRED_BINDING_REGISTRY
meta.load_binding_registry()
- @unittest.skip("TODO: Move to emulator.")
def test_mbd_deferred_bindings_enabled_decode(self):
binding = BlobClientConverter
pb = protos.ParameterBinding(name='test',
@@ -35,7 +32,7 @@ def test_mbd_deferred_bindings_enabled_decode(self):
sample_mbd = MockMBD(version="1.0",
source="AzureStorageBlobs",
content_type="application/json",
- content="{\"Connection\":\"AzureWebJobsStorage\","
+ content="{\"Connection\":\"AZURE_STORAGE_CONNECTION_STRING\"," # noqa
"\"ContainerName\":"
"\"python-worker-tests\","
"\"BlobName\":"
diff --git a/workers/azure_functions_worker/dispatcher.py b/workers/azure_functions_worker/dispatcher.py
index 5907edd93..fa8be1117 100644
--- a/workers/azure_functions_worker/dispatcher.py
+++ b/workers/azure_functions_worker/dispatcher.py
@@ -13,6 +13,7 @@
import queue
import sys
import threading
+import time
from asyncio import BaseEventLoop
from datetime import datetime
from logging import LogRecord
@@ -775,6 +776,9 @@ async def _handle__function_environment_reload_request(self, request):
for var in env_vars:
os.environ[var] = env_vars[var]
+ # Refresh timezone information after environment reload
+ time.tzset()
+
# Apply PYTHON_THREADPOOL_THREAD_COUNT
self._stop_sync_call_tp()
self._sync_call_tp = (
diff --git a/workers/azure_functions_worker/utils/dependency.py b/workers/azure_functions_worker/utils/dependency.py
index a5e32ba3c..3bf95ac03 100644
--- a/workers/azure_functions_worker/utils/dependency.py
+++ b/workers/azure_functions_worker/utils/dependency.py
@@ -152,6 +152,18 @@ def prioritize_customer_dependencies(cls, cx_working_dir=None):
if not cx_deps_path:
cx_deps_path = cls.cx_deps_path
+ # Fallback: if cx_deps_path is still empty and we have a working
+ # directory, try the well-known .python_packages path. This
+ # handles Flex Consumption where the app content is mounted after
+ # the worker process starts (so the path was never in sys.path).
+ if not cx_deps_path and working_directory:
+ candidate = os.path.join(
+ working_directory, '.python_packages', 'lib',
+ 'site-packages'
+ )
+ if os.path.isdir(candidate):
+ cx_deps_path = candidate
+
logger.info(
'Applying prioritize_customer_dependencies: '
'worker_dependencies_path: %s, customer_dependencies_path: %s, '
@@ -161,7 +173,7 @@ def prioritize_customer_dependencies(cls, cx_working_dir=None):
is_envvar_true("WEBSITE_PLACEHOLDER_MODE"))
cls._remove_from_sys_path(cls.worker_deps_path)
- cls._add_to_sys_path(cls.cx_deps_path, True)
+ cls._add_to_sys_path(cx_deps_path, True)
# Deprioritize worker dependencies but don't completely remove it
# Otherwise, it will break some really old function apps, those
diff --git a/workers/azure_functions_worker/version.py b/workers/azure_functions_worker/version.py
index 30e5c590a..7c456248c 100644
--- a/workers/azure_functions_worker/version.py
+++ b/workers/azure_functions_worker/version.py
@@ -1,4 +1,4 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
-VERSION = '4.41.0'
+VERSION = '4.41.2'
diff --git a/workers/proxy_worker/dispatcher.py b/workers/proxy_worker/dispatcher.py
index 7c14f0349..55aaeb3b1 100644
--- a/workers/proxy_worker/dispatcher.py
+++ b/workers/proxy_worker/dispatcher.py
@@ -14,6 +14,7 @@
from typing import Any, Optional
import grpc
+
from proxy_worker import protos
from proxy_worker.logging import (
CONSOLE_LOG_PREFIX,
@@ -32,7 +33,6 @@
PYTHON_ENABLE_DEBUG_LOGGING,
)
from proxy_worker.version import VERSION
-
from .utils.dependency import DependencyManager
# Library worker import reloaded in init and reload request
@@ -99,10 +99,12 @@ def get_global_current_invocation_id() -> Optional[str]:
def get_current_invocation_id() -> Optional[Any]:
+ global _library_worker
# Check global current invocation first (most up-to-date)
- global_invocation_id = get_global_current_invocation_id()
- if global_invocation_id is not None:
- return global_invocation_id
+ if _library_worker and not hasattr(_library_worker, 'invocation_id_cv'):
+ global_invocation_id = get_global_current_invocation_id()
+ if global_invocation_id is not None:
+ return global_invocation_id
# Check asyncio task context
try:
@@ -125,6 +127,17 @@ def get_current_invocation_id() -> Optional[Any]:
if thread_invocation_id is not None:
return thread_invocation_id
+ # Check contextvar from library worker
+ if _library_worker:
+ try:
+ cv = getattr(_library_worker, 'invocation_id_cv', None)
+ if cv:
+ val = cv.get()
+ if val is not None:
+ return val
+ except (AttributeError, LookupError):
+ pass
+
return getattr(_invocation_id_local, 'invocation_id', None)
diff --git a/workers/proxy_worker/utils/dependency.py b/workers/proxy_worker/utils/dependency.py
index ea165785f..07f270d2e 100644
--- a/workers/proxy_worker/utils/dependency.py
+++ b/workers/proxy_worker/utils/dependency.py
@@ -134,9 +134,21 @@ def prioritize_customer_dependencies(cls, cx_working_dir=None):
if not cx_deps_path:
cx_deps_path = cls.cx_deps_path
+ # Fallback: if cx_deps_path is still empty and we have a working
+ # directory, try the well-known .python_packages path. This
+ # handles Flex Consumption where the app content is mounted after
+ # the worker process starts (so the path was never in sys.path).
+ if not cx_deps_path and working_directory:
+ candidate = os.path.join(
+ working_directory, '.python_packages', 'lib',
+ 'site-packages'
+ )
+ if os.path.isdir(candidate):
+ cx_deps_path = candidate
+
cls._remove_from_sys_path(cls.worker_deps_path)
cls._add_to_sys_path(cls.worker_deps_path, True)
- cls._add_to_sys_path(cls.cx_deps_path, True)
+ cls._add_to_sys_path(cx_deps_path, True)
cls._add_to_sys_path(working_directory, False)
logger.info(
diff --git a/workers/proxy_worker/version.py b/workers/proxy_worker/version.py
index 30e5c590a..7c456248c 100644
--- a/workers/proxy_worker/version.py
+++ b/workers/proxy_worker/version.py
@@ -1,4 +1,4 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
-VERSION = '4.41.0'
+VERSION = '4.41.2'
diff --git a/workers/pyproject.toml b/workers/pyproject.toml
index 09793ed61..9d2ca7e45 100644
--- a/workers/pyproject.toml
+++ b/workers/pyproject.toml
@@ -13,9 +13,6 @@ classifiers = [
"Development Status :: 5 - Production/Stable",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
- "Programming Language :: Python :: 3.7",
- "Programming Language :: Python :: 3.8",
- "Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
@@ -31,22 +28,20 @@ dependencies = [
"azure-functions==1.24.0; python_version < '3.10'",
"azure-functions==1.25.0b2; python_version >= '3.10'",
"python-dateutil~=2.9.0",
- "protobuf~=3.19.3; python_version == '3.7'",
- "protobuf~=4.25.3; python_version >= '3.8' and python_version < '3.13'",
+ "protobuf~=4.25.3; python_version < '3.13'",
"protobuf~=5.29.0; python_version == '3.13'",
"protobuf~=6.32.1; python_version == '3.14'",
- "grpcio-tools~=1.43.0; python_version == '3.7'",
- "grpcio-tools~=1.59.0; python_version >= '3.8' and python_version < '3.13'",
+    "grpcio-tools~=1.59.0; python_version < '3.13'",
"grpcio-tools~=1.70.0; python_version == '3.13'",
"grpcio-tools~=1.75.1; python_version == '3.14'",
- "grpcio~=1.43.0; python_version == '3.7'",
- "grpcio ~=1.59.0; python_version >= '3.8' and python_version < '3.13'",
+    "grpcio~=1.59.0; python_version < '3.13'",
"grpcio~=1.70.0; python_version == '3.13'",
"grpcio~=1.75.1; python_version == '3.14'",
- "uvloop~=0.21.0; python_version >= '3.13' and sys_platform != 'win32'",
- "azurefunctions-extensions-base==1.1.0; python_version >= '3.8'",
- "azure-functions-runtime==1.1.0b3; python_version >= '3.13'",
- "azure-functions-runtime-v1==1.1.0b2; python_version >= '3.13'"
+ "uvloop~=0.21.0; python_version == '3.13' and sys_platform != 'win32'",
+ "uvloop~=0.22.0; python_version == '3.14' and sys_platform != 'win32'",
+ "azurefunctions-extensions-base==1.1.0",
+ "azure-functions-runtime==1.1.0b4; python_version >= '3.13'",
+ "azure-functions-runtime-v1==1.1.0b4; python_version >= '3.13'"
]
[project.urls]
@@ -58,18 +53,18 @@ dev = [
"azure-cosmos", # Used for CosmosDB Emulator tests
"azure-eventhub", # Used for EventHub E2E tests
"azure-functions-durable", # Used for Durable E2E tests
- "azure-monitor-opentelemetry; python_version >= '3.8'", # Used for Azure Monitor unit tests
+ "azure-monitor-opentelemetry", # Used for Azure Monitor unit tests
+ "azure-storage-blob~=12.27.1", # Used for Blob Emulator tests
"flask",
"fastapi~=0.103.2",
"pydantic",
- "flake8==5.*; python_version == '3.7'",
- "flake8==6.*; python_version >= '3.8'",
+ "flake8==6.*",
"mypy",
"pytest~=7.4.4",
"requests==2.*",
"coverage",
"pytest-sugar",
- "opentelemetry-api; python_version >= '3.8'", # Used for OpenTelemetry unit tests
+ "opentelemetry-api", # Used for OpenTelemetry unit tests
"pytest-cov",
"pytest-xdist",
"pytest-randomly",
@@ -86,6 +81,7 @@ dev = [
"pre-commit",
"invoke",
"cryptography",
+ "pyjwt",
"jsonpickle",
"orjson"
]
@@ -95,7 +91,7 @@ test-http-v2 = [
]
test-deferred-bindings = [
"azurefunctions-extensions-bindings-blob==1.1.1",
- "azurefunctions-extensions-bindings-eventhub==1.0.0b1"
+ "azurefunctions-extensions-bindings-eventhub==1.0.0b1; python_version < '3.14'"
]
[build-system]
@@ -140,4 +136,4 @@ version = {attr = "azure_functions_worker.version.VERSION"}
[tool.pytest.ini_options]
pythonpath = ["workers"]
-testpaths = ["workers/tests", "runtimes/v2/azure_functions_runtime/tests", "runtimes/v1/azure_functions_runtime_v1/tests"]
+testpaths = ["workers/tests", "runtimes/v2/azure_functions_runtime/tests", "runtimes/v1/azure_functions_runtime_v1/tests"]
\ No newline at end of file
diff --git a/workers/python/proxyV4/worker.py b/workers/python/proxyV4/worker.py
index fe5ab9627..91cf2b8db 100644
--- a/workers/python/proxyV4/worker.py
+++ b/workers/python/proxyV4/worker.py
@@ -20,12 +20,6 @@ def is_azure_environment():
or AZURE_WEBSITE_INSTANCE_ID in os.environ)
-def validate_python_version():
- minor_version = sys.version_info[1]
- if not (13 <= minor_version < 15):
- raise RuntimeError(f'Unsupported Python version: 3.{minor_version}')
-
-
def determine_user_pkg_paths():
"""This finds the user packages when function apps are running on the cloud
User packages are defined in:
@@ -43,7 +37,6 @@ def add_script_root_to_sys_path():
if __name__ == '__main__':
- validate_python_version()
func_worker_dir = str(pathlib.Path(__file__).absolute().parent)
env = os.environ
diff --git a/workers/tests/consumption_tests/test_linux_consumption.py b/workers/tests/consumption_tests/test_linux_consumption.py
index 109a6806c..3249e5bc3 100644
--- a/workers/tests/consumption_tests/test_linux_consumption.py
+++ b/workers/tests/consumption_tests/test_linux_consumption.py
@@ -12,12 +12,12 @@
PYTHON_ISOLATE_WORKER_DEPENDENCIES,
)
from requests import Request
-from tests.utils.testutils_lc import LinuxConsumptionWebHostController
+from tests.utils.testutils_lc import FlexConsumptionWebHostController
_DEFAULT_HOST_VERSION = "4"
-class TestLinuxConsumption(TestCase):
+class TestFlexConsumption(TestCase):
@classmethod
def setUpClass(cls):
@@ -27,13 +27,13 @@ def setUpClass(cls):
cls._storage = os.getenv('AzureWebJobsStorage')
if cls._storage is None:
raise RuntimeError('Environment variable AzureWebJobsStorage is '
- 'required before running Linux Consumption test')
+ 'required before running Flex Consumption test')
def test_placeholder_mode_root_returns_ok(self):
"""In any circumstances, a placeholder container should returns 200
even when it is not specialized.
"""
- with LinuxConsumptionWebHostController(_DEFAULT_HOST_VERSION,
+ with FlexConsumptionWebHostController(_DEFAULT_HOST_VERSION,
self._py_version) as ctrl:
req = Request('GET', ctrl.url)
resp = ctrl.send_request(req)
@@ -43,11 +43,11 @@ def test_http_no_auth(self):
"""An HttpTrigger function app with 'azure-functions' library
should return 200.
"""
- with LinuxConsumptionWebHostController(_DEFAULT_HOST_VERSION,
+ with FlexConsumptionWebHostController(_DEFAULT_HOST_VERSION,
self._py_version) as ctrl:
ctrl.assign_container(env={
"AzureWebJobsStorage": self._storage,
- "SCM_RUN_FROM_PACKAGE": self._get_blob_url("HttpNoAuth")
+ "SCM_RUN_FROM_PACKAGE": self._get_function_app("HttpNoAuth")
})
req = Request('GET', f'{ctrl.url}/api/HttpTrigger')
resp = ctrl.send_request(req)
@@ -68,11 +68,11 @@ def test_common_libraries(self):
should return 200 after importing all libraries.
"""
- with LinuxConsumptionWebHostController(_DEFAULT_HOST_VERSION,
+ with FlexConsumptionWebHostController(_DEFAULT_HOST_VERSION,
self._py_version) as ctrl:
ctrl.assign_container(env={
"AzureWebJobsStorage": self._storage,
- "SCM_RUN_FROM_PACKAGE": self._get_blob_url("CommonLibraries")
+ "SCM_RUN_FROM_PACKAGE": self._get_function_app("CommonLibraries")
})
req = Request('GET', f'{ctrl.url}/api/HttpTrigger')
resp = ctrl.send_request(req)
@@ -90,11 +90,11 @@ def test_debug_logging_disabled(self):
should return 200 and by default customer debug logging should be
disabled.
"""
- with LinuxConsumptionWebHostController(_DEFAULT_HOST_VERSION,
+ with FlexConsumptionWebHostController(_DEFAULT_HOST_VERSION,
self._py_version) as ctrl:
ctrl.assign_container(env={
"AzureWebJobsStorage": self._storage,
- "SCM_RUN_FROM_PACKAGE": self._get_blob_url("EnableDebugLogging")
+ "SCM_RUN_FROM_PACKAGE": self._get_function_app("EnableDebugLogging")
})
req = Request('GET', f'{ctrl.url}/api/HttpTrigger1')
resp = ctrl.send_request(req)
@@ -117,11 +117,11 @@ def test_debug_logging_enabled(self):
should return 200 and with customer debug logging enabled, debug logs
should be written to container logs.
"""
- with LinuxConsumptionWebHostController(_DEFAULT_HOST_VERSION,
+ with FlexConsumptionWebHostController(_DEFAULT_HOST_VERSION,
self._py_version) as ctrl:
ctrl.assign_container(env={
"AzureWebJobsStorage": self._storage,
- "SCM_RUN_FROM_PACKAGE": self._get_blob_url(
+ "SCM_RUN_FROM_PACKAGE": self._get_function_app(
"EnableDebugLogging"),
PYTHON_ENABLE_DEBUG_LOGGING: "1"
})
@@ -145,12 +145,12 @@ def test_pinning_functions_to_older_version(self):
should return 200 with the azure functions version set to 1.11.1
since dependency isolation is enabled by default for all py versions
"""
- with LinuxConsumptionWebHostController(_DEFAULT_HOST_VERSION,
+ with FlexConsumptionWebHostController(_DEFAULT_HOST_VERSION,
self._py_version) as ctrl:
ctrl.assign_container(env={
"AzureWebJobsStorage": self._storage,
- "SCM_RUN_FROM_PACKAGE": self._get_blob_url(
+ "SCM_RUN_FROM_PACKAGE": self._get_function_app(
"PinningFunctions"),
PYTHON_ISOLATE_WORKER_DEPENDENCIES: "1",
})
@@ -168,11 +168,11 @@ def test_opencensus_with_extensions_enabled(self):
should return 200 after importing all libraries.
"""
- with LinuxConsumptionWebHostController(_DEFAULT_HOST_VERSION,
+ with FlexConsumptionWebHostController(_DEFAULT_HOST_VERSION,
self._py_version) as ctrl:
ctrl.assign_container(env={
"AzureWebJobsStorage": self._storage,
- "SCM_RUN_FROM_PACKAGE": self._get_blob_url("Opencensus"),
+ "SCM_RUN_FROM_PACKAGE": self._get_function_app("Opencensus"),
PYTHON_ENABLE_WORKER_EXTENSIONS: "1"
})
req = Request('GET', f'{ctrl.url}/api/opencensus')
@@ -183,11 +183,11 @@ def test_opencensus_with_extensions_enabled_init_indexing(self):
"""
A function app with init indexing enabled
"""
- with LinuxConsumptionWebHostController(_DEFAULT_HOST_VERSION,
+ with FlexConsumptionWebHostController(_DEFAULT_HOST_VERSION,
self._py_version) as ctrl:
ctrl.assign_container(env={
"AzureWebJobsStorage": self._storage,
- "SCM_RUN_FROM_PACKAGE": self._get_blob_url("Opencensus"),
+ "SCM_RUN_FROM_PACKAGE": self._get_function_app("Opencensus"),
PYTHON_ENABLE_WORKER_EXTENSIONS: "1",
PYTHON_ENABLE_INIT_INDEXING: "true"
})
@@ -199,11 +199,11 @@ def test_reload_variables_after_oom_error(self):
"""
A function app with HTTPtrigger mocking error code 137
"""
- with LinuxConsumptionWebHostController(_DEFAULT_HOST_VERSION,
+ with FlexConsumptionWebHostController(_DEFAULT_HOST_VERSION,
self._py_version) as ctrl:
ctrl.assign_container(env={
"AzureWebJobsStorage": self._storage,
- "SCM_RUN_FROM_PACKAGE": self._get_blob_url(
+ "SCM_RUN_FROM_PACKAGE": self._get_function_app(
"OOMError"),
PYTHON_ISOLATE_WORKER_DEPENDENCIES: "1"
})
@@ -213,16 +213,7 @@ def test_reload_variables_after_oom_error(self):
sleep(2)
logs = ctrl.get_container_logs()
- self.assertRegex(
- logs,
- r"Applying prioritize_customer_dependencies: "
- r"worker_dependencies_path: \/azure-functions-host\/"
- r"workers\/python\/.*?\/LINUX\/X64,"
- r" customer_dependencies_path: \/home\/site\/wwwroot\/"
- r"\.python_packages\/lib\/site-packages, working_directory:"
- r" \/home\/site\/wwwroot, Linux Consumption: True,"
- r" Placeholder: False")
-
+ assert "Finished prioritize_customer_dependencies" in logs
self.assertNotIn("Failure Exception: ModuleNotFoundError",
logs)
@@ -232,12 +223,12 @@ def test_http_v2_fastapi_streaming_upload_download(self):
A function app using http v2 fastapi extension with streaming upload and
download
"""
- with LinuxConsumptionWebHostController(_DEFAULT_HOST_VERSION,
+ with FlexConsumptionWebHostController(_DEFAULT_HOST_VERSION,
self._py_version) as ctrl:
ctrl.assign_container(env={
"AzureWebJobsStorage": self._storage,
"SCM_RUN_FROM_PACKAGE":
- self._get_blob_url("HttpV2FastApiStreaming"),
+ self._get_function_app("HttpV2FastApiStreaming"),
PYTHON_ENABLE_INIT_INDEXING: "true",
PYTHON_ISOLATE_WORKER_DEPENDENCIES: "1"
})
@@ -264,11 +255,7 @@ def generate_random_bytes_stream():
self.assertEqual(
streamed_data, b'streamingtestingresponseisreturned')
- def _get_blob_url(self, scenario_name: str) -> str:
- base_url = "http://172.17.0.1:10000/devstoreaccount1/apps"
-
- container_sas_token = os.getenv('CONTAINER_SAS_TOKEN')
- if not container_sas_token:
- raise RuntimeError('Environment variable CONTAINER_SAS_TOKEN is '
- 'required before running Linux Consumption test')
- return f"{base_url}/{scenario_name}.zip?{container_sas_token}"
+ @staticmethod
+ def _get_function_app(scenario_name: str) -> str:
+ """Return the zip filename for the given test scenario."""
+ return f"{scenario_name}.zip"
diff --git a/workers/tests/emulator_tests/eventhub_functions/eventhub_functions_sdk/function_app.py b/workers/tests/emulator_tests/eventhub_functions/eventhub_functions_sdk/function_app.py
new file mode 100644
index 000000000..adace8c60
--- /dev/null
+++ b/workers/tests/emulator_tests/eventhub_functions/eventhub_functions_sdk/function_app.py
@@ -0,0 +1,117 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+import json
+import os
+import typing
+
+import azure.functions as func
+import azurefunctions.extensions.bindings.eventhub as eh
+
+from azure.eventhub import EventData
+from azure.eventhub.aio import EventHubProducerClient
+
+app = func.FunctionApp(http_auth_level=func.AuthLevel.ANONYMOUS)
+
+
+# An HttpTrigger for generating an EventHub event from the EventHub output binding
+@app.function_name(name="eventhub_output")
+@app.route(route="eventhub_output")
+@app.event_hub_output(arg_name="event",
+ event_hub_name="python-worker-ci-eventhub-one-sdk",
+ connection="AzureWebJobsEventHubConnectionString")
+def eventhub_output(req: func.HttpRequest, event: func.Out[str]):
+ event.set(req.get_body().decode('utf-8'))
+ return 'OK'
+
+
+# This is an actual EventHub trigger which will convert the event data
+# into a storage blob.
+@app.function_name(name="eventhub_trigger")
+@app.event_hub_message_trigger(arg_name="event",
+ event_hub_name="python-worker-ci-eventhub-one-sdk",
+ connection="AzureWebJobsEventHubConnectionString"
+ )
+@app.blob_output(arg_name="$return",
+ path="python-worker-tests/test-eventhub-triggered-sdk.txt",
+ connection="AzureWebJobsStorage")
+def eventhub_trigger(event: eh.EventData) -> bytes:
+ return event.body_as_str()
+
+
+# Retrieve the event data from storage blob and return it as Http response
+@app.function_name(name="get_eventhub_triggered")
+@app.route(route="get_eventhub_triggered")
+@app.blob_input(arg_name="file",
+ path="python-worker-tests/test-eventhub-triggered-sdk.txt",
+ connection="AzureWebJobsStorage")
+def get_eventhub_triggered(req: func.HttpRequest,
+ file: func.InputStream) -> str:
+ return file.read().decode('utf-8')
+
+
+# Retrieve the event data from storage blob and return it as Http response
+@app.function_name(name="get_metadata_triggered")
+@app.route(route="get_metadata_triggered")
+@app.blob_input(arg_name="file",
+ path="python-worker-tests/test-metadata-triggered-sdk.txt",
+ connection="AzureWebJobsStorage")
+async def get_metadata_triggered(req: func.HttpRequest,
+ file: func.InputStream) -> str:
+ return func.HttpResponse(body=file.read().decode('utf-8'),
+ status_code=200,
+ mimetype='application/json')
+
+
+# An HttpTrigger for generating an EventHub event from the azure-eventhub SDK.
+# Events generated from azure-eventhub contain the full metadata.
+@app.function_name(name="metadata_output")
+@app.route(route="metadata_output")
+async def metadata_output(req: func.HttpRequest):
+ # Parse event metadata from http request
+ json_string = req.get_body().decode('utf-8')
+ event_dict = json.loads(json_string)
+
+ # Create an EventHub Client and event batch
+ client = EventHubProducerClient.from_connection_string(
+ os.getenv('AzureWebJobsEventHubConnectionString'),
+ eventhub_name='python-worker-ci-eventhub-one-metadata-sdk')
+
+ # Generate new event based on http request with full metadata
+ event_data_batch = await client.create_batch()
+ event_data_batch.add(EventData(event_dict.get('body')))
+
+ # Send out event into event hub
+ try:
+ await client.send_batch(event_data_batch)
+ finally:
+ await client.close()
+
+ return 'OK'
+
+
+@app.function_name(name="metadata_trigger")
+@app.event_hub_message_trigger(
+ arg_name="event",
+ event_hub_name="python-worker-ci-eventhub-one-metadata-sdk",
+ connection="AzureWebJobsEventHubConnectionString")
+@app.blob_output(arg_name="$return",
+ path="python-worker-tests/test-metadata-triggered-sdk.txt",
+ connection="AzureWebJobsStorage")
+async def metadata_trigger(event: eh.EventData) -> bytes:
+ # Formatting metadata dict based on EventData object
+ event_metadata = {"SystemProperties": {
+ "PartitionKey": event.partition_key,
+ "SequenceNumber": event.system_properties[b'x-opt-sequence-number'],
+ "Offset": event.system_properties[b'x-opt-offset'],
+ }}
+ event_dict: typing.Mapping[str, typing.Any] = {
+ 'body': event.body_as_str(),
+ # Uncomment this when the EnqueuedTimeUtc is fixed in azure-functions
+ # 'enqueued_time': event.enqueued_time.isoformat(),
+ 'partition_key': event.partition_key,
+ 'sequence_number': event.sequence_number,
+ 'offset': event.system_properties[b'x-opt-offset'],
+ 'metadata': event_metadata
+ }
+
+ return json.dumps(event_dict)
diff --git a/workers/tests/emulator_tests/test_blob_functions.py b/workers/tests/emulator_tests/test_blob_functions.py
index d6a840a38..840b30cc7 100644
--- a/workers/tests/emulator_tests/test_blob_functions.py
+++ b/workers/tests/emulator_tests/test_blob_functions.py
@@ -12,7 +12,6 @@ class TestBlobFunctions(testutils.WebHostTestCase):
def get_script_dir(cls):
return testutils.EMULATOR_TESTS_FOLDER / 'blob_functions'
- @testutils.retryable_test(3, 5)
def test_blob_io_str(self):
r = self.webhost.request('POST', 'put_blob_str', data='test-data')
self.assertEqual(r.status_code, 200)
diff --git a/workers/tests/emulator_tests/test_eventhub_functions.py b/workers/tests/emulator_tests/test_eventhub_functions.py
index 32a3c9378..d6559e676 100644
--- a/workers/tests/emulator_tests/test_eventhub_functions.py
+++ b/workers/tests/emulator_tests/test_eventhub_functions.py
@@ -4,6 +4,8 @@
import time
from tests.utils import testutils
+from unittest import skipIf
+import sys
class TestEventHubFunctions(testutils.WebHostTestCase):
@@ -116,3 +118,12 @@ class TestEventHubFunctionsSteinGeneric(TestEventHubFunctions):
def get_script_dir(cls):
return testutils.EMULATOR_TESTS_FOLDER / 'eventhub_functions' / \
'eventhub_functions_stein' / 'generic'
+
+
+@skipIf(sys.version_info.minor >= 14, "Skip to figure out uamqp.")
+class TestEventHubFunctionsSDK(TestEventHubFunctions):
+
+ @classmethod
+ def get_script_dir(cls):
+ return testutils.EMULATOR_TESTS_FOLDER / 'eventhub_functions' / \
+ 'eventhub_functions_sdk'
diff --git a/workers/tests/emulator_tests/test_servicebus_functions.py b/workers/tests/emulator_tests/test_servicebus_functions.py
index 36432b69e..cdc5e3acb 100644
--- a/workers/tests/emulator_tests/test_servicebus_functions.py
+++ b/workers/tests/emulator_tests/test_servicebus_functions.py
@@ -67,8 +67,7 @@ def get_script_dir(cls):
'servicebus_functions_stein' / 'generic'
-@unittest.skipIf(sys.version_info.minor <= 8, "The servicebus extension"
- "is only supported for 3.9+.")
+@unittest.skipIf(sys.version_info.minor >= 14, "Skip to figure out uamqp.")
class TestServiceBusSDKFunctions(testutils.WebHostTestCase):
@classmethod
diff --git a/workers/tests/emulator_tests/utils/eventhub/config.json b/workers/tests/emulator_tests/utils/eventhub/config.json
index 710935c14..ba14a13bb 100644
--- a/workers/tests/emulator_tests/utils/eventhub/config.json
+++ b/workers/tests/emulator_tests/utils/eventhub/config.json
@@ -40,6 +40,24 @@
"Name": "cg1"
}
]
+ },
+ {
+ "Name": "python-worker-ci-eventhub-one-sdk",
+ "PartitionCount": 2,
+ "ConsumerGroups": [
+ {
+ "Name": "cg1"
+ }
+ ]
+ },
+ {
+ "Name": "python-worker-ci-eventhub-one-metadata-sdk",
+ "PartitionCount": 2,
+ "ConsumerGroups": [
+ {
+ "Name": "cg1"
+ }
+ ]
}
]
}
diff --git a/workers/tests/endtoend/eventgrid_functions/eventgrid_functions_stein/function_app.py b/workers/tests/endtoend/eventgrid_functions/eventgrid_functions_stein/function_app.py
index 94f05bf22..4f2caa97f 100644
--- a/workers/tests/endtoend/eventgrid_functions/eventgrid_functions_stein/function_app.py
+++ b/workers/tests/endtoend/eventgrid_functions/eventgrid_functions_stein/function_app.py
@@ -12,7 +12,7 @@
@app.event_grid_trigger(arg_name="event")
@app.blob_output(arg_name="$return",
path="python-worker-tests/test-eventgrid-triggered.txt",
- connection="AzureWebJobsStorage")
+ connection="STORAGE_CONNECTION")
def event_grid_trigger(event: func.EventGridEvent) -> str:
logging.info("Event grid function is triggered!")
return json.dumps({
@@ -54,10 +54,10 @@ def eventgrid_output_binding(
@app.function_name(name="eventgrid_output_binding_message_to_blobstore")
@app.queue_trigger(arg_name="msg", queue_name="test-event-grid-storage-queue",
- connection="AzureWebJobsStorage")
+ connection="STORAGE_CONNECTION")
@app.blob_output(arg_name="$return",
path="python-worker-tests/test-eventgrid-output-binding.txt",
- connection="AzureWebJobsStorage")
+ connection="STORAGE_CONNECTION")
def eventgrid_output_binding_message_to_blobstore(
msg: func.QueueMessage) -> bytes:
return msg.get_body()
@@ -67,7 +67,7 @@ def eventgrid_output_binding_message_to_blobstore(
@app.route(route="eventgrid_output_binding_success")
@app.blob_input(arg_name="file",
path="python-worker-tests/test-eventgrid-output-binding.txt",
- connection="AzureWebJobsStorage")
+ connection="STORAGE_CONNECTION")
def eventgrid_output_binding_success(
req: func.HttpRequest, file: func.InputStream) -> str:
return file.read().decode('utf-8')
@@ -77,7 +77,7 @@ def eventgrid_output_binding_success(
@app.route(route="get_eventgrid_triggered")
@app.blob_input(arg_name="file",
path="python-worker-tests/test-eventgrid-triggered.txt",
- connection="AzureWebJobsStorage")
+ connection="STORAGE_CONNECTION")
def get_eventgrid_triggered(
req: func.HttpRequest, file: func.InputStream) -> str:
return file.read().decode('utf-8')
diff --git a/workers/tests/endtoend/eventgrid_functions/eventgrid_functions_stein/generic/function_app.py b/workers/tests/endtoend/eventgrid_functions/eventgrid_functions_stein/generic/function_app.py
index 5dff24d80..4403e5329 100644
--- a/workers/tests/endtoend/eventgrid_functions/eventgrid_functions_stein/generic/function_app.py
+++ b/workers/tests/endtoend/eventgrid_functions/eventgrid_functions_stein/generic/function_app.py
@@ -13,7 +13,7 @@
@app.generic_output_binding(
arg_name="$return",
type="blob",
- connection="AzureWebJobsStorage",
+ connection="STORAGE_CONNECTION",
path="python-worker-tests/test-eventgrid-triggered.txt")
def event_grid_trigger(event: func.EventGridEvent) -> str:
logging.info("Event grid function is triggered!")
@@ -61,11 +61,11 @@ def eventgrid_output_binding(
@app.generic_trigger(arg_name="msg",
type="queueTrigger",
queue_name="test-event-grid-storage-queue",
- connection="AzureWebJobsStorage")
+ connection="STORAGE_CONNECTION")
@app.generic_output_binding(
arg_name="$return",
type="blob",
- connection="AzureWebJobsStorage",
+ connection="STORAGE_CONNECTION",
path="python-worker-tests/test-eventgrid-output-binding.txt")
def eventgrid_output_binding_message_to_blobstore(
msg: func.QueueMessage) -> bytes:
@@ -80,7 +80,7 @@ def eventgrid_output_binding_message_to_blobstore(
arg_name="file",
type="blob",
path="python-worker-tests/test-eventgrid-output-binding.txt",
- connection="AzureWebJobsStorage")
+ connection="STORAGE_CONNECTION")
def eventgrid_output_binding_success(
req: func.HttpRequest, file: func.InputStream) -> str:
return file.read().decode('utf-8')
@@ -94,7 +94,7 @@ def eventgrid_output_binding_success(
arg_name="file",
type="blob",
path="python-worker-tests/test-eventgrid-triggered.txt",
- connection="AzureWebJobsStorage")
+ connection="STORAGE_CONNECTION")
def get_eventgrid_triggered(
req: func.HttpRequest, file: func.InputStream) -> str:
return file.read().decode('utf-8')
diff --git a/workers/tests/endtoend/eventgrid_functions/eventgrid_output_binding_message_to_blobstore/function.json b/workers/tests/endtoend/eventgrid_functions/eventgrid_output_binding_message_to_blobstore/function.json
index f25661fdb..c7935c4c3 100644
--- a/workers/tests/endtoend/eventgrid_functions/eventgrid_output_binding_message_to_blobstore/function.json
+++ b/workers/tests/endtoend/eventgrid_functions/eventgrid_output_binding_message_to_blobstore/function.json
@@ -6,13 +6,13 @@
"type": "queueTrigger",
"direction": "in",
"queueName": "test-event-grid-storage-queue",
- "connection": "AzureWebJobsStorage"
+ "connection": "STORAGE_CONNECTION"
},
{
"type": "blob",
"direction": "out",
"name": "$return",
- "connection": "AzureWebJobsStorage",
+ "connection": "STORAGE_CONNECTION",
"path": "python-worker-tests/test-eventgrid-output-binding.txt"
}
]
diff --git a/workers/tests/endtoend/eventgrid_functions/eventgrid_output_binding_success/function.json b/workers/tests/endtoend/eventgrid_functions/eventgrid_output_binding_success/function.json
index e63945d3a..772fe35ea 100644
--- a/workers/tests/endtoend/eventgrid_functions/eventgrid_output_binding_success/function.json
+++ b/workers/tests/endtoend/eventgrid_functions/eventgrid_output_binding_success/function.json
@@ -11,7 +11,7 @@
"type": "blob",
"direction": "in",
"name": "file",
- "connection": "AzureWebJobsStorage",
+ "connection": "STORAGE_CONNECTION",
"path": "python-worker-tests/test-eventgrid-output-binding.txt"
},
{
diff --git a/workers/tests/endtoend/eventgrid_functions/eventgrid_trigger/function.json b/workers/tests/endtoend/eventgrid_functions/eventgrid_trigger/function.json
index bf33c7072..178c219d1 100644
--- a/workers/tests/endtoend/eventgrid_functions/eventgrid_trigger/function.json
+++ b/workers/tests/endtoend/eventgrid_functions/eventgrid_trigger/function.json
@@ -11,7 +11,7 @@
"type": "blob",
"direction": "out",
"name": "$return",
- "connection": "AzureWebJobsStorage",
+ "connection": "STORAGE_CONNECTION",
"path": "python-worker-tests/test-eventgrid-triggered.txt"
}
]
diff --git a/workers/tests/endtoend/eventgrid_functions/get_eventgrid_triggered/function.json b/workers/tests/endtoend/eventgrid_functions/get_eventgrid_triggered/function.json
index 2c2727754..7ccc66eca 100644
--- a/workers/tests/endtoend/eventgrid_functions/get_eventgrid_triggered/function.json
+++ b/workers/tests/endtoend/eventgrid_functions/get_eventgrid_triggered/function.json
@@ -11,7 +11,7 @@
"type": "blob",
"direction": "in",
"name": "file",
- "connection": "AzureWebJobsStorage",
+ "connection": "STORAGE_CONNECTION",
"path": "python-worker-tests/test-eventgrid-triggered.txt"
},
{
diff --git a/workers/tests/unittest_proxy/test_dispatcher.py b/workers/tests/unittest_proxy/test_dispatcher.py
index 995f63597..976b09a90 100644
--- a/workers/tests/unittest_proxy/test_dispatcher.py
+++ b/workers/tests/unittest_proxy/test_dispatcher.py
@@ -20,6 +20,9 @@
)
+_real_import = builtins.__import__
+
+
class TestDispatcher(unittest.TestCase):
@patch("proxy_worker.dispatcher.queue.Queue")
@@ -316,7 +319,7 @@ async def test_worker_init_starts_threadpool(mock_eol, mock_streaming,
def fake_import(name, *a, **k):
if name == "azure_functions_runtime":
return runtime_module
- return builtins.__import__(name, *a, **k)
+ return _real_import(name, *a, **k)
mock_import.side_effect = fake_import
dispatcher = Dispatcher(asyncio.get_event_loop(), "localhost", 7071,
@@ -344,7 +347,7 @@ async def test_env_reload_starts_threadpool(mock_eol, mock_streaming,
def fake_import(name, *a, **k):
if name == "azure_functions_runtime":
return runtime_module
- return builtins.__import__(name, *a, **k)
+ return _real_import(name, *a, **k)
mock_import.side_effect = fake_import
dispatcher = Dispatcher(asyncio.get_event_loop(), "localhost", 7071,
@@ -379,7 +382,7 @@ async def test_worker_init_missing_threadpool_apis(mock_eol,
def fake_import(name, *a, **k):
if name == "azure_functions_runtime":
return runtime_module
- return builtins.__import__(name, *a, **k)
+ return _real_import(name, *a, **k)
mock_import.side_effect = fake_import
dispatcher = Dispatcher(asyncio.get_event_loop(), "localhost", 7071,
@@ -409,6 +412,9 @@ def setUp(self):
with dispatcher_module._current_invocation_lock:
dispatcher_module._current_invocation_id = None
+ # Clear library worker to ensure tests run with expected state
+ dispatcher_module._library_worker = None
+
def tearDown(self):
"""Clean up after each test"""
# Import the module-level variables properly
@@ -422,6 +428,9 @@ def tearDown(self):
with dispatcher_module._current_invocation_lock:
dispatcher_module._current_invocation_id = None
+ # Clear library worker
+ dispatcher_module._library_worker = None
+
def test_global_invocation_id_set_and_get(self):
"""Test setting and getting global current invocation ID"""
test_id = "test-invocation-123"
@@ -457,8 +466,8 @@ def test_thread_invocation_registry(self):
# Test clear non-existent (should not raise)
clear_thread_invocation_id(99999)
- def test_get_current_invocation_id_priority_global(self):
- """Test that global invocation ID has highest priority"""
+ def test_get_current_invocation_id_ignores_global_by_default(self):
+ """Test that global invocation ID is ignored by default"""
global_id = "global-123"
thread_id = threading.get_ident()
thread_id_value = "thread-456"
@@ -467,9 +476,9 @@ def test_get_current_invocation_id_priority_global(self):
set_current_invocation_id(global_id)
set_thread_invocation_id(thread_id, thread_id_value)
- # Global should take priority
+ # Thread should take priority (global is ignored)
result = get_current_invocation_id()
- self.assertEqual(result, global_id)
+ self.assertEqual(result, thread_id_value)
def test_get_current_invocation_id_fallback_to_thread(self):
"""Test fallback to thread registry when global is None"""
diff --git a/workers/tests/utils/testutils_docker.py b/workers/tests/utils/testutils_docker.py
index feaa186d4..05d3545df 100644
--- a/workers/tests/utils/testutils_docker.py
+++ b/workers/tests/utils/testutils_docker.py
@@ -68,7 +68,10 @@ class WebHostDockerContainerBase(unittest.TestCase):
def find_latest_image(image_repo: str,
image_url: str) -> str:
- regex = re.compile(_HOST_VERSION + r'.\d+.\d+-python' + _python_version)
+ # New regex to match version format: 4.1042.100-4-python3.11
+ regex = re.compile(_HOST_VERSION
+ + r'\.10\d+\.\d+(-\d+)?-python'
+ + _python_version + r'(-appservice)?$')
response = requests.get(image_url, allow_redirects=True)
if not response.ok:
@@ -88,8 +91,16 @@ def find_latest_image(image_repo: str,
# sorting all the python versions based on the runtime version and
# getting the latest released runtime version for python.
- latest_version = sorted(python_versions, key=lambda x: float(
- x.split(_HOST_VERSION + '.')[-1].split("-python")[0]))[-1]
+ # Parse version format: 4.1042.100-4-python3.11
+ def parse_version(tag):
+ # "4.1042.100-4"
+ version_part = tag.split('-python')[0]
+ # ["4", "1042", "100", "4"]
+ parts = version_part.replace('-', '.').split('.')
+ # (4, 1042, 100, 4)
+ return tuple(int(p) for p in parts)
+
+ latest_version = sorted(python_versions, key=parse_version)[-1]
image_tag = f'{image_repo}:{latest_version}'
return image_tag
diff --git a/workers/tests/utils/testutils_lc.py b/workers/tests/utils/testutils_lc.py
index 94979adb0..c98cc7f59 100644
--- a/workers/tests/utils/testutils_lc.py
+++ b/workers/tests/utils/testutils_lc.py
@@ -21,13 +21,19 @@
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives import padding
-from tests.utils.constants import PROJECT_ROOT
+from tests.utils.constants import PROJECT_ROOT, TESTS_ROOT
-# Linux Consumption Testing Constants
+_FUNCTION_APP_ZIPS_DIR = TESTS_ROOT / 'consumption_tests' / 'function_app_zips'
+
+# Flex Consumption Testing Constants
_DOCKER_PATH = "DOCKER_PATH"
_DOCKER_DEFAULT_PATH = "docker"
-_MESH_IMAGE_URL = "https://mcr.microsoft.com/v2/azure-functions/mesh/tags/list"
-_MESH_IMAGE_REPO = "mcr.microsoft.com/azure-functions/mesh"
+_OS_TYPE = "bookworm" if sys.version_info.minor < 14 else "noble"
+_MESH_IMAGE_URL = (
+ f"https://mcr.microsoft.com/v2/azure-functions/{_OS_TYPE}/"
+ "flexconsumption/tags/list"
+)
+_MESH_IMAGE_REPO = f"mcr.microsoft.com/azure-functions/{_OS_TYPE}/flexconsumption"
_FUNC_GITHUB_ZIP = "https://github.com/Azure/azure-functions-python-library" \
"/archive/refs/heads/dev.zip"
_FUNC_FILE_NAME = "azure-functions-python-library-dev"
@@ -36,7 +42,7 @@
'extensions/archive/refs/heads/dev.zip'
-class LinuxConsumptionWebHostController:
+class FlexConsumptionWebHostController:
"""A controller for spawning mesh Docker container and apply multiple
test cases on it.
"""
@@ -71,30 +77,37 @@ def assign_container(self, env: Dict[str, str] = {}):
env["FUNCTIONS_WORKER_RUNTIME"] = "python"
env["FUNCTIONS_WORKER_RUNTIME_VERSION"] = self._py_version
env["WEBSITE_SITE_NAME"] = self._uuid
- env["WEBSITE_HOSTNAME"] = f"{self._uuid}.azurewebsites.com"
-
- # Debug: Print SCM_RUN_FROM_PACKAGE value
- scm_package = env.get("SCM_RUN_FROM_PACKAGE", "NOT_SET")
- print(f"🔍 DEBUG: SCM_RUN_FROM_PACKAGE in env: {scm_package}")
+ env["WEBSITE_POD_NAME"] = self._uuid
# Wait for the container to be ready
- max_retries = 60
+ max_retries = 10
for i in range(max_retries):
try:
ping_req = requests.Request(method="GET", url=f"{url}/admin/host/ping")
ping_response = self.send_request(ping_req)
if ping_response.ok:
- print(f"🔍 DEBUG: Container ready after {i + 1} attempts")
break
- else:
- print("🔍 DEBUG: Ping attempt {i+1}/60 failed with status "
- f"{ping_response.status_code}")
except Exception as e:
- print(f"🔍 DEBUG: Ping attempt {i + 1}/60 failed with exception: {e}")
+ pass
time.sleep(1)
else:
raise RuntimeError(f'Container {self._uuid} did not become ready in time')
+ # Flex/Legion host does not download app content during assign (it's a
+ # no-op). In local Docker tests there is no Legion infrastructure, so
+ # we must manually mount the content BEFORE the assign call so the
+ # host can discover functions when it specializes.
+ pkg_name = env.get("SCM_RUN_FROM_PACKAGE") or env.get(
+ "WEBSITE_RUN_FROM_PACKAGE"
+ )
+ if pkg_name:
+ local_zip = _FUNCTION_APP_ZIPS_DIR / pkg_name
+ if not local_zip.exists():
+ raise RuntimeError(
+ f"Local function app zip not found: {local_zip}"
+ )
+ self._mount_package_in_container(str(local_zip))
+
# Send the specialization context via a POST request
req = requests.Request(
method="POST",
@@ -112,6 +125,57 @@ def assign_container(self, env: Dict[str, str] = {}):
f' at {url} (status {response.status_code}).'
f' stdout: {stdout}')
+ def _mount_package_in_container(self, local_path: str):
+ """Copy a local function app package into the container and
+ mount/extract it at /home/site/wwwroot.
+
+ Supports both regular zip files (PK magic) and SquashFS images
+ (hsqs magic) which Azure Functions uses for Flex Consumption.
+ """
+ with open(local_path, "rb") as f:
+ magic = f.read(4)
+
+ is_squashfs = (magic == b'hsqs')
+ is_zip = (magic[:2] == b'PK')
+
+ if not is_squashfs and not is_zip:
+ raise RuntimeError(
+ f"{local_path} is neither a zip nor a squashfs image. "
+ f"First 4 bytes: {magic}"
+ )
+
+ container_pkg = "/tmp/app.sqsh" if is_squashfs else "/tmp/app.zip"
+
+ # Copy the package into the container
+ subprocess.run(
+ [self._docker_cmd, "cp", local_path,
+ f"{self._uuid}:{container_pkg}"],
+ check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
+ )
+
+ if is_squashfs:
+ # Mount squashfs image at /home/site/wwwroot
+ subprocess.run(
+ [self._docker_cmd, "exec", self._uuid,
+ "bash", "-c",
+ "mkdir -p /home/site/wwwroot "
+ f"&& mount -t squashfs -o loop {container_pkg} "
+ "/home/site/wwwroot"],
+ check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
+ )
+ else:
+ # Extract zip using Python's zipfile
+ subprocess.run(
+ [self._docker_cmd, "exec", self._uuid,
+ "python", "-c",
+ "import zipfile, os; "
+ "os.makedirs('/home/site/wwwroot', exist_ok=True); "
+ f"zipfile.ZipFile('{container_pkg}').extractall("
+ "'/home/site/wwwroot'); "
+ f"os.remove('{container_pkg}')"],
+ check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
+ )
+
def send_request(
self,
req: requests.Request,
@@ -125,16 +189,9 @@ def send_request(
prepped = session.prepare_request(req)
prepped.headers['Content-Type'] = 'application/json'
- # Try to generate a proper JWT token first
- try:
- jwt_token = self._generate_jwt_token()
- # Use JWT token for newer Azure Functions host versions
- prepped.headers['Authorization'] = f'Bearer {jwt_token}'
- except ImportError:
- # Fall back to the old SWT token format if jwt library is not available
- swt_token = self._get_site_restricted_token()
- prepped.headers['x-ms-site-restricted-token'] = swt_token
- prepped.headers['Authorization'] = f'Bearer {swt_token}'
+ # For flex consumption, use JWT Bearer token
+ jwt_token = self._generate_jwt_token()
+ prepped.headers['Authorization'] = f'Bearer {jwt_token}'
# Add additional headers required by Azure Functions host
prepped.headers['x-site-deployment-id'] = self._uuid
@@ -215,23 +272,42 @@ def _download_extensions() -> str:
def spawn_container(self,
image: str,
env: Dict[str, str] = {}) -> int:
- """Create a docker container and record its port. Create a docker
- container according to the image name. Return the port of container.
- """
- # Construct environment variables and start the docker container
- worker_path = os.path.join(PROJECT_ROOT, 'azure_functions_worker')
-
- # TODO: Mount library in docker container
- # self._download_azure_functions()
-
- # Download python extension base package
- ext_folder = self._download_extensions()
+ """Create a docker container and record its port."""
+ os.environ['_DUMMY_CONT_KEY'] = "Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw=="
+ worker_name = 'azure_functions_worker' \
+ if sys.version_info.minor < 13 else 'proxy_worker'
+ worker_path = os.path.join(PROJECT_ROOT, worker_name)
container_worker_path = (
f"/azure-functions-host/workers/python/{self._py_version}/"
- "LINUX/X64/azure_functions_worker"
+ f"LINUX/X64/{worker_name}"
)
+ # For Python 3.13+, also mount the runtime libraries
+ runtime_v2_path = None
+ runtime_v1_path = None
+ container_runtime_v2_path = None
+ container_runtime_v1_path = None
+
+ if sys.version_info.minor >= 13:
+ repo_root = os.path.dirname(PROJECT_ROOT)
+ runtime_v2_path = os.path.join(
+ repo_root, 'runtimes', 'v2', 'azure_functions_runtime'
+ )
+ runtime_v1_path = os.path.join(
+ repo_root, 'runtimes', 'v1', 'azure_functions_runtime_v1'
+ )
+ container_runtime_v2_path = (
+ f"/azure-functions-host/workers/python/{self._py_version}/"
+ "LINUX/X64/azure_functions_runtime"
+ )
+ container_runtime_v1_path = (
+ f"/azure-functions-host/workers/python/{self._py_version}/"
+ "LINUX/X64/azure_functions_runtime_v1"
+ )
+
+ ext_folder = self._download_extensions()
+
base_ext_container_path = (
f"/azure-functions-host/workers/python/{self._py_version}/"
"LINUX/X64/azurefunctions/extensions/base"
@@ -248,13 +324,27 @@ def spawn_container(self,
run_cmd.extend(["--cap-add", "SYS_ADMIN"])
run_cmd.extend(["--device", "/dev/fuse"])
run_cmd.extend(["-e", f"CONTAINER_NAME={self._uuid}"])
- run_cmd.extend(["-e",
- f"CONTAINER_ENCRYPTION_KEY={os.getenv('_DUMMY_CONT_KEY')}"])
+ encryption_key = os.getenv('_DUMMY_CONT_KEY')
+ full_key_bytes = base64.b64decode(encryption_key.encode())
+ aes_key_bytes = full_key_bytes[:32]
+ aes_key_base64 = base64.b64encode(aes_key_bytes).decode()
+ run_cmd.extend(["-e", f"CONTAINER_ENCRYPTION_KEY={aes_key_base64}"])
run_cmd.extend(["-e", "WEBSITE_PLACEHOLDER_MODE=1"])
- # Add required environment variables for JWT issuer validation
run_cmd.extend(["-e", f"WEBSITE_SITE_NAME={self._uuid}"])
- run_cmd.extend(["-e", "WEBSITE_SKU=Dynamic"])
+ run_cmd.extend(["-e", f"WEBSITE_POD_NAME={self._uuid}"])
+ run_cmd.extend(["-e", "WEBSITE_SKU=FlexConsumption"])
+ # Mount Worker Code
run_cmd.extend(["-v", f'{worker_path}:{container_worker_path}'])
+
+ # Mount runtime libraries for Python 3.13+
+ if runtime_v2_path and runtime_v1_path:
+ run_cmd.extend([
+ "-v", f'{runtime_v2_path}:{container_runtime_v2_path}'
+ ])
+ run_cmd.extend([
+ "-v", f'{runtime_v1_path}:{container_runtime_v1_path}'
+ ])
+
run_cmd.extend(["-v",
f'{base_ext_local_path}:{base_ext_container_path}'])
@@ -316,103 +406,85 @@ def safe_kill_container(self) -> bool:
@classmethod
def _get_site_restricted_token(cls) -> str:
- """Get the header value which can be used by x-ms-site-restricted-token
- which expires in one day.
- """
- # For compatibility with older Azure Functions host versions,
- # try the old SWT format first
+ """Get SWT token for site-restricted authentication."""
exp_ns = int((time.time() + 24 * 60 * 60) * 1000000000)
token = cls._encrypt_context(os.getenv('_DUMMY_CONT_KEY'), f'exp={exp_ns}')
return token
def _generate_jwt_token(self) -> str:
- """Generate a proper JWT token for newer Azure Functions host versions."""
+ """Generate JWT token for Flex consumption authentication."""
try:
import jwt
- except ImportError:
- # Fall back to SWT format if JWT library not available
- return self._get_site_restricted_token()
+ except ImportError as e:
+ raise RuntimeError("PyJWT library required. Install with: pip install pyjwt") from e
- # JWT payload matching Azure Functions host expectations
- exp_time = int(time.time()) + (24 * 60 * 60) # 24 hours from now
-
- # Use the site name consistently for issuer and audience validation
+ exp_time = int(time.time()) + (24 * 60 * 60)
+ iat_time = int(time.time())
site_name = self._uuid
- container_name = self._uuid
-
- # According to Azure Functions host analysis, use site-specific issuer format
- # This matches the ValidIssuers array in ScriptJwtBearerExtensions.cs
issuer = f"https://{site_name}.azurewebsites.net"
+
+ # Flex Consumption Host validation can be tricky with exact audience matching.
+ # Provide a comprehensive list of potential expected audiences.
+ audience = [
+ issuer,
+ f"{issuer}/",
+ site_name,
+ f"{site_name}.azurewebsites.net",
+ f"https://{site_name}.azurewebsites.net",
+ f"https://{site_name}.azurewebsites.net/",
+ "https://azure-functions-host",
+ "https://localhost",
+ ]
payload = {
'exp': exp_time,
- 'iat': int(time.time()),
- # Use site-specific issuer format that matches ValidIssuers in the host
+ 'iat': iat_time,
+ 'nbf': iat_time,
'iss': issuer,
- # For Linux Consumption in placeholder mode, audience is the container name
- 'aud': container_name
+ 'aud': site_name,
+ 'sub': site_name,
}
- # Use the same encryption key for JWT signing
- key = base64.b64decode(os.getenv('_DUMMY_CONT_KEY').encode())
+ encryption_key_str = os.getenv('_DUMMY_CONT_KEY')
+ if not encryption_key_str:
+ raise RuntimeError("_DUMMY_CONT_KEY environment variable not set")
- # Generate JWT token using HMAC SHA256 (matches Azure Functions host)
+ key_bytes = base64.b64decode(encryption_key_str.encode())
+ key = key_bytes[:32]
jwt_token = jwt.encode(payload, key, algorithm='HS256')
return jwt_token
@classmethod
- def _get_site_encrypted_context(cls,
- site_name: str,
- env: Dict[str, str]) -> str:
- """Get the encrypted context for placeholder mode specialization"""
- # Ensure WEBSITE_SITE_NAME is set to simulate production mode
+ def _get_site_encrypted_context(cls, site_name: str, env: Dict[str, str]) -> str:
+ """Get encrypted specialization context."""
env["WEBSITE_SITE_NAME"] = site_name
-
- ctx = {
- "SiteId": 1,
- "SiteName": site_name,
- "Environment": env
- }
-
+ ctx = {"SiteId": 1, "SiteName": site_name, "Environment": env}
json_ctx = json.dumps(ctx)
-
encrypted = cls._encrypt_context(os.getenv('_DUMMY_CONT_KEY'), json_ctx)
return encrypted
@classmethod
def _encrypt_context(cls, encryption_key: str, plain_text: str) -> str:
- """Encrypt plain text context into an encrypted message which can
- be accepted by the host
- """
- # Decode the encryption key
+ """Encrypt context for specialization."""
encryption_key_bytes = base64.b64decode(encryption_key.encode())
+ aes_key = encryption_key_bytes[:32]
- # Pad the plaintext to be a multiple of the AES block size
padder = padding.PKCS7(algorithms.AES.block_size).padder()
plain_text_bytes = padder.update(plain_text.encode()) + padder.finalize()
- # Initialization vector (IV) (fixed value for simplicity)
iv_bytes = '0123456789abcedf'.encode()
-
- # Create AES cipher with CBC mode
- cipher = Cipher(algorithms.AES(encryption_key_bytes),
- modes.CBC(iv_bytes), backend=default_backend())
-
- # Perform encryption
+ cipher = Cipher(algorithms.AES(aes_key), modes.CBC(iv_bytes), backend=default_backend())
encryptor = cipher.encryptor()
encrypted_bytes = encryptor.update(plain_text_bytes) + encryptor.finalize()
- # Compute SHA256 hash of the encryption key
- digest = hashes.Hash(hashes.SHA256(), backend=default_backend())
- digest.update(encryption_key_bytes)
- key_sha256 = digest.finalize()
-
- # Encode IV, encrypted message, and SHA256 hash in base64
iv_base64 = base64.b64encode(iv_bytes).decode()
encrypted_base64 = base64.b64encode(encrypted_bytes).decode()
+
+ digest = hashes.Hash(hashes.SHA256(), backend=default_backend())
+ digest.update(aes_key)
+ key_sha256 = digest.finalize()
key_sha256_base64 = base64.b64encode(key_sha256).decode()
- # Return the final result
return f'{iv_base64}.{encrypted_base64}.{key_sha256_base64}'
def __enter__(self):