diff --git a/.all-contributorsrc b/.all-contributorsrc
index 20c0996d58..576f5b41c8 100644
--- a/.all-contributorsrc
+++ b/.all-contributorsrc
@@ -75,7 +75,8 @@
"question",
"review",
"security",
- "test"
+ "test",
+ "talk"
]
},
{
@@ -151,7 +152,7 @@
},
{
"login": "miguelmorin",
- "name": "miguelmorin",
+ "name": "Miguel Morin",
"avatar_url": "https://avatars.githubusercontent.com/u/32396311?v=4",
"profile": "https://github.com/miguelmorin",
"contributions": [
@@ -163,7 +164,7 @@
},
{
"login": "cathiest",
- "name": "cathiest",
+ "name": "Catherine Lawrence",
"avatar_url": "https://avatars.githubusercontent.com/u/38755168?v=4",
"profile": "https://github.com/cathiest",
"contributions": [
@@ -175,7 +176,7 @@
},
{
"login": "bw-faststream",
- "name": "bw-faststream",
+ "name": "Benjamin Walden",
"avatar_url": "https://avatars.githubusercontent.com/u/54804128?v=4",
"profile": "https://github.com/bw-faststream",
"contributions": [
@@ -188,7 +189,7 @@
},
{
"login": "oforrest",
- "name": "oforrest",
+ "name": "Oliver Forrest",
"avatar_url": "https://avatars.githubusercontent.com/u/49275282?v=4",
"profile": "https://github.com/oforrest",
"contributions": [
@@ -217,7 +218,7 @@
},
{
"login": "warwick26",
- "name": "warwick26",
+ "name": "Warwick Wood",
"avatar_url": "https://avatars.githubusercontent.com/u/33690673?v=4",
"profile": "https://github.com/warwick26",
"contributions": [
@@ -257,7 +258,7 @@
},
{
"login": "ens-george-holmes",
- "name": "ens-george-holmes",
+ "name": "George Holmes",
"avatar_url": "https://avatars.githubusercontent.com/u/62715301?v=4",
"profile": "https://github.com/ens-george-holmes",
"contributions": [
@@ -291,7 +292,7 @@
},
{
"login": "rwinstanley1",
- "name": "rwinstanley1",
+ "name": "Rachel Winstanley",
"avatar_url": "https://avatars.githubusercontent.com/u/56362072?v=4",
"profile": "https://github.com/rwinstanley1",
"contributions": [
@@ -303,7 +304,7 @@
},
{
"login": "sysdan",
- "name": "Daniel",
+ "name": "Daniel Allen",
"avatar_url": "https://avatars.githubusercontent.com/u/49038294?v=4",
"profile": "https://github.com/sysdan",
"contributions": [
@@ -323,7 +324,7 @@
},
{
"login": "harisood",
- "name": "harisood",
+ "name": "Hari Sood",
"avatar_url": "https://avatars.githubusercontent.com/u/67151373?v=4",
"profile": "https://github.com/harisood",
"contributions": [
@@ -342,7 +343,7 @@
},
{
"login": "getcarter21",
- "name": "getcarter21",
+ "name": "Ian Carter",
"avatar_url": "https://avatars.githubusercontent.com/u/34555297?v=4",
"profile": "https://github.com/getcarter21",
"contributions": [
@@ -363,7 +364,7 @@
},
{
"login": "ens-brett-todd",
- "name": "ens-brett-todd",
+ "name": "Brett Todd",
"avatar_url": "https://avatars.githubusercontent.com/u/62715658?v=4",
"profile": "https://github.com/ens-brett-todd",
"contributions": [
@@ -384,7 +385,7 @@
},
{
"login": "kevinxufs",
- "name": "kevinxufs",
+ "name": "Kevin Xu",
"avatar_url": "https://avatars.githubusercontent.com/u/48526846?v=4",
"profile": "https://github.com/kevinxufs",
"contributions": [
@@ -395,7 +396,7 @@
},
{
"login": "vollmersj",
- "name": "vollmersj",
+ "name": "Sebastian Vollmer",
"avatar_url": "https://avatars.githubusercontent.com/u/12613127?v=4",
"profile": "https://github.com/vollmersj",
"contributions": [
@@ -417,7 +418,7 @@
},
{
"login": "JulesMarz",
- "name": "Jules M",
+ "name": "Jules Manser",
"avatar_url": "https://avatars.githubusercontent.com/u/40864686?v=4",
"profile": "https://github.com/JulesMarz",
"contributions": [
@@ -483,7 +484,7 @@
},
{
"login": "DDelbarre",
- "name": "DDelbarre",
+ "name": "Daniel Delbarre",
"avatar_url": "https://avatars.githubusercontent.com/u/108824056?v=4",
"profile": "https://github.com/DDelbarre",
"contributions": [
@@ -548,7 +549,7 @@
},
{
"login": "Arielle-Bennett",
- "name": "arielle-bennett",
+ "name": "Arielle Bennett",
"avatar_url": "https://avatars.githubusercontent.com/u/74651964?v=4",
"profile": "https://github.com/Arielle-Bennett",
"contributions": [
@@ -559,7 +560,7 @@
},
{
"login": "Davsarper",
- "name": "davsarper",
+ "name": "David Sarmiento Perez",
"avatar_url": "https://avatars.githubusercontent.com/u/118986872?v=4",
"profile": "https://github.com/Davsarper",
"contributions": [
@@ -578,17 +579,20 @@
"avatar_url": "https://avatars.githubusercontent.com/u/1616531?v=4",
"profile": "https://carlos.gavidia.me/",
"contributions": [
- "bug"
+ "bug",
+ "ideas",
+ "doc"
]
},
{
"login": "helendduncan",
- "name": "Helen D Little",
+ "name": "Helen Duncan Little",
"avatar_url": "https://avatars.githubusercontent.com/u/46891265?v=4",
"profile": "https://github.com/helendduncan",
"contributions": [
"bug",
- "review"
+ "review",
+ "ideas"
]
},
{
@@ -607,12 +611,13 @@
"profile": "https://github.com/dsj976",
"contributions": [
"bug",
- "doc"
+ "doc",
+ "ideas"
]
},
{
"login": "mattwestby",
- "name": "mattwestby",
+ "name": "Matt Westby",
"avatar_url": "https://avatars.githubusercontent.com/u/91054185?v=4",
"profile": "https://github.com/mattwestby",
"contributions": [
diff --git a/.github/workflows/build_documentation.yaml b/.github/workflows/build_documentation.yaml
index 5350075540..4ed3b51860 100644
--- a/.github/workflows/build_documentation.yaml
+++ b/.github/workflows/build_documentation.yaml
@@ -46,7 +46,7 @@ jobs:
run: hatch run docs:build
- name: Link Checker
- uses: lycheeverse/lychee-action@v2.0.2
+ uses: lycheeverse/lychee-action@v2.1.0
with:
args: --config='./.lychee.toml' --no-progress './docs/build/html/**/*.html'
fail: true # fail on broken links
diff --git a/.github/workflows/test_code.yaml b/.github/workflows/test_code.yaml
index 0119816136..996c06ead4 100644
--- a/.github/workflows/test_code.yaml
+++ b/.github/workflows/test_code.yaml
@@ -55,7 +55,7 @@ jobs:
shell: bash
run: npm install -g markdown-link-check
- name: Link Checker
- uses: lycheeverse/lychee-action@v2.0.2
+ uses: lycheeverse/lychee-action@v2.1.0
with:
args: --config='./.lychee.toml' --no-progress --offline '**/*.md' --exclude-path './docs'
fail: true # fail on broken links
diff --git a/.hatch/requirements-docs.txt b/.hatch/requirements-docs.txt
index 3f9d070931..95fcbdfd41 100644
--- a/.hatch/requirements-docs.txt
+++ b/.hatch/requirements-docs.txt
@@ -3,7 +3,7 @@
#
# - emoji==2.14.0
# - myst-parser==4.0.0
-# - pydata-sphinx-theme==0.15.4
+# - pydata-sphinx-theme==0.16.0
# - sphinx-togglebutton==0.3.2
# - sphinx==8.1.3
#
@@ -50,11 +50,9 @@ mdurl==0.1.2
# via markdown-it-py
myst-parser==4.0.0
# via hatch.envs.docs
-packaging==24.1
- # via
- # pydata-sphinx-theme
- # sphinx
-pydata-sphinx-theme==0.15.4
+packaging==24.2
+ # via sphinx
+pydata-sphinx-theme==0.16.0
# via hatch.envs.docs
pygments==2.18.0
# via
@@ -93,7 +91,7 @@ typing-extensions==4.12.2
# via pydata-sphinx-theme
urllib3==2.2.3
# via requests
-wheel==0.44.0
+wheel==0.45.0
# via sphinx-togglebutton
# The following packages are considered to be unsafe in a requirements file:
diff --git a/.hatch/requirements-lint.txt b/.hatch/requirements-lint.txt
index 1e636503c1..295c694a65 100644
--- a/.hatch/requirements-lint.txt
+++ b/.hatch/requirements-lint.txt
@@ -1,52 +1,52 @@
#
# This file is autogenerated by hatch-pip-compile with Python 3.12
#
-# - ansible-dev-tools==24.9.0
-# - ansible==10.5.0
+# - ansible-dev-tools==24.10.2
+# - ansible==10.6.0
# - black==24.10.0
-# - mypy==1.11.2
+# - mypy==1.13.0
# - pandas-stubs==2.2.3.241009
# - pydantic==2.9.2
-# - ruff==0.6.9
+# - ruff==0.7.4
# - types-appdirs==1.4.3.5
# - types-chevron==0.14.2.20240310
# - types-pytz==2024.2.0.20241003
# - types-pyyaml==6.0.12.20240917
-# - types-requests==2.32.0.20240914
+# - types-requests==2.32.0.20241016
#
annotated-types==0.7.0
# via pydantic
-ansible==10.5.0
+ansible==10.6.0
# via hatch.envs.lint
ansible-builder==3.1.0
# via
# ansible-dev-environment
# ansible-dev-tools
# ansible-navigator
-ansible-compat==24.9.1
+ansible-compat==24.10.0
# via
# ansible-lint
# molecule
# pytest-ansible
-ansible-core==2.17.5
+ansible-core==2.17.6
# via
# ansible
# ansible-compat
# ansible-lint
# molecule
# pytest-ansible
-ansible-creator==24.10.1
+ansible-creator==24.11.0
# via ansible-dev-tools
ansible-dev-environment==24.9.0
# via ansible-dev-tools
-ansible-dev-tools==24.9.0
+ansible-dev-tools==24.10.2
# via hatch.envs.lint
-ansible-lint==24.9.2
+ansible-lint==24.10.0
# via
# ansible-dev-tools
# ansible-navigator
-ansible-navigator==24.9.0
+ansible-navigator==24.10.0
# via ansible-dev-tools
ansible-runner==2.4.0
# via ansible-navigator
@@ -89,8 +89,6 @@ distlib==0.3.9
# virtualenv
distro==1.9.0
# via bindep
-docutils==0.21.2
- # via python-daemon
enrich==1.2.7
# via molecule
execnet==2.1.1
@@ -129,17 +127,17 @@ mdurl==0.1.2
# via markdown-it-py
molecule==24.9.0
# via ansible-dev-tools
-mypy==1.11.2
+mypy==1.13.0
# via hatch.envs.lint
mypy-extensions==1.0.0
# via
# black
# mypy
-numpy==2.1.2
+numpy==2.1.3
# via pandas-stubs
onigurumacffi==1.3.0
# via ansible-navigator
-packaging==24.1
+packaging==24.2
# via
# ansible-builder
# ansible-compat
@@ -199,7 +197,7 @@ pytest-ansible==24.9.0
# tox-ansible
pytest-xdist==3.6.1
# via tox-ansible
-python-daemon==3.0.1
+python-daemon==3.1.0
# via ansible-runner
python-gnupg==0.5.3
# via ansible-sign
@@ -222,12 +220,12 @@ referencing==0.35.1
# jsonschema-specifications
resolvelib==1.0.1
# via ansible-core
-rich==13.9.2
+rich==13.9.4
# via
# ansible-lint
# enrich
# molecule
-rpds-py==0.20.0
+rpds-py==0.21.0
# via
# jsonschema
# referencing
@@ -235,16 +233,16 @@ ruamel-yaml==0.18.6
# via ansible-lint
ruamel-yaml-clib==0.2.12
# via ruamel-yaml
-ruff==0.6.9
+ruff==0.7.4
# via hatch.envs.lint
subprocess-tee==0.4.2
# via
# ansible-compat
# ansible-dev-environment
# ansible-lint
-tox==4.23.0
+tox==4.23.2
# via tox-ansible
-tox-ansible==24.9.0
+tox-ansible==24.10.0
# via ansible-dev-tools
types-appdirs==1.4.3.5
# via hatch.envs.lint
@@ -256,7 +254,7 @@ types-pytz==2024.2.0.20241003
# pandas-stubs
types-pyyaml==6.0.12.20240917
# via hatch.envs.lint
-types-requests==2.32.0.20240914
+types-requests==2.32.0.20241016
# via hatch.envs.lint
typing-extensions==4.12.2
# via
@@ -267,7 +265,7 @@ tzdata==2024.2
# via ansible-navigator
urllib3==2.2.3
# via types-requests
-virtualenv==20.27.0
+virtualenv==20.27.1
# via tox
wcmatch==10.0
# via
@@ -275,7 +273,7 @@ wcmatch==10.0
# molecule
yamllint==1.35.1
# via ansible-lint
-zipp==3.20.2
+zipp==3.21.0
# via importlib-metadata
# The following packages are considered to be unsafe in a requirements file:
diff --git a/.hatch/requirements-test.txt b/.hatch/requirements-test.txt
index a14759b25b..643331837b 100644
--- a/.hatch/requirements-test.txt
+++ b/.hatch/requirements-test.txt
@@ -1,42 +1,42 @@
#
# This file is autogenerated by hatch-pip-compile with Python 3.12
#
-# [constraints] .hatch/requirements.txt (SHA256: 12cb2eff6268d97a3d9d63d3ec5d670c6f13a571befda8d279cb7ce6ab9f5bb5)
+# [constraints] .hatch/requirements.txt (SHA256: ca6dfe8295dd8d2e6e4ade0fce58d158854ce5df89be8d092b36c34fe2679f3f)
#
# - appdirs==1.4.4
-# - azure-core==1.31.0
+# - azure-core==1.32.0
# - azure-identity==1.19.0
-# - azure-keyvault-certificates==4.8.0
-# - azure-keyvault-keys==4.9.0
-# - azure-keyvault-secrets==4.8.0
+# - azure-keyvault-certificates==4.9.0
+# - azure-keyvault-keys==4.10.0
+# - azure-keyvault-secrets==4.9.0
# - azure-mgmt-compute==33.0.0
# - azure-mgmt-containerinstance==10.1.0
-# - azure-mgmt-dns==8.1.0
+# - azure-mgmt-dns==8.2.0
# - azure-mgmt-keyvault==10.3.1
# - azure-mgmt-msi==7.0.0
# - azure-mgmt-rdbms==10.1.0
-# - azure-mgmt-resource==23.1.1
+# - azure-mgmt-resource==23.2.0
# - azure-mgmt-storage==21.2.1
-# - azure-storage-blob==12.23.1
-# - azure-storage-file-datalake==12.17.0
-# - azure-storage-file-share==12.19.0
+# - azure-storage-blob==12.24.0
+# - azure-storage-file-datalake==12.18.0
+# - azure-storage-file-share==12.20.0
# - chevron==0.14.0
-# - cryptography==43.0.1
+# - cryptography==43.0.3
# - fqdn==1.5.1
# - psycopg[binary]==3.1.19
-# - pulumi-azure-native==2.66.0
-# - pulumi-azuread==6.0.0
-# - pulumi-random==4.16.6
-# - pulumi==3.136.1
+# - pulumi-azure-native==2.72.0
+# - pulumi-azuread==6.0.1
+# - pulumi-random==4.16.7
+# - pulumi==3.139.0
# - pydantic==2.9.2
-# - pyjwt[crypto]==2.9.0
+# - pyjwt[crypto]==2.10.0
# - pytz==2024.2
# - pyyaml==6.0.2
-# - rich==13.9.2
-# - simple-acme-dns==3.1.0
-# - typer==0.12.5
+# - rich==13.9.4
+# - simple-acme-dns==3.2.0
+# - typer==0.13.0
# - websocket-client==1.8.0
-# - coverage==7.6.3
+# - coverage==7.6.7
# - freezegun==1.5.1
# - pytest-mock==3.14.0
# - pytest==8.3.3
@@ -74,7 +74,7 @@ azure-common==1.1.28
# azure-mgmt-rdbms
# azure-mgmt-resource
# azure-mgmt-storage
-azure-core==1.31.0
+azure-core==1.32.0
# via
# -c .hatch/requirements.txt
# hatch.envs.test
@@ -91,15 +91,15 @@ azure-identity==1.19.0
# via
# -c .hatch/requirements.txt
# hatch.envs.test
-azure-keyvault-certificates==4.8.0
+azure-keyvault-certificates==4.9.0
# via
# -c .hatch/requirements.txt
# hatch.envs.test
-azure-keyvault-keys==4.9.0
+azure-keyvault-keys==4.10.0
# via
# -c .hatch/requirements.txt
# hatch.envs.test
-azure-keyvault-secrets==4.8.0
+azure-keyvault-secrets==4.9.0
# via
# -c .hatch/requirements.txt
# hatch.envs.test
@@ -111,7 +111,7 @@ azure-mgmt-containerinstance==10.1.0
# via
# -c .hatch/requirements.txt
# hatch.envs.test
-azure-mgmt-core==1.4.0
+azure-mgmt-core==1.5.0
# via
# -c .hatch/requirements.txt
# azure-mgmt-compute
@@ -122,7 +122,7 @@ azure-mgmt-core==1.4.0
# azure-mgmt-rdbms
# azure-mgmt-resource
# azure-mgmt-storage
-azure-mgmt-dns==8.1.0
+azure-mgmt-dns==8.2.0
# via
# -c .hatch/requirements.txt
# hatch.envs.test
@@ -138,7 +138,7 @@ azure-mgmt-rdbms==10.1.0
# via
# -c .hatch/requirements.txt
# hatch.envs.test
-azure-mgmt-resource==23.1.1
+azure-mgmt-resource==23.2.0
# via
# -c .hatch/requirements.txt
# hatch.envs.test
@@ -146,16 +146,16 @@ azure-mgmt-storage==21.2.1
# via
# -c .hatch/requirements.txt
# hatch.envs.test
-azure-storage-blob==12.23.1
+azure-storage-blob==12.24.0
# via
# -c .hatch/requirements.txt
# hatch.envs.test
# azure-storage-file-datalake
-azure-storage-file-datalake==12.17.0
+azure-storage-file-datalake==12.18.0
# via
# -c .hatch/requirements.txt
# hatch.envs.test
-azure-storage-file-share==12.19.0
+azure-storage-file-share==12.20.0
# via
# -c .hatch/requirements.txt
# hatch.envs.test
@@ -180,9 +180,9 @@ click==8.1.7
# via
# -c .hatch/requirements.txt
# typer
-coverage==7.6.3
+coverage==7.6.7
# via hatch.envs.test
-cryptography==43.0.1
+cryptography==43.0.3
# via
# -c .hatch/requirements.txt
# hatch.envs.test
@@ -195,7 +195,7 @@ cryptography==43.0.1
# msal
# pyjwt
# pyopenssl
-debugpy==1.8.7
+debugpy==1.8.8
# via
# -c .hatch/requirements.txt
# pulumi
@@ -203,7 +203,7 @@ dill==0.3.9
# via
# -c .hatch/requirements.txt
# pulumi
-dnspython==2.6.1
+dnspython==2.7.0
# via
# -c .hatch/requirements.txt
# simple-acme-dns
@@ -269,7 +269,7 @@ oauthlib==3.2.2
# via
# -c .hatch/requirements.txt
# requests-oauthlib
-packaging==24.1
+packaging==24.2
# via pytest
parver==0.5
# via
@@ -295,22 +295,22 @@ psycopg-binary==3.1.19
# via
# -c .hatch/requirements.txt
# psycopg
-pulumi==3.136.1
+pulumi==3.139.0
# via
# -c .hatch/requirements.txt
# hatch.envs.test
# pulumi-azure-native
# pulumi-azuread
# pulumi-random
-pulumi-azure-native==2.66.0
+pulumi-azure-native==2.72.0
# via
# -c .hatch/requirements.txt
# hatch.envs.test
-pulumi-azuread==6.0.0
+pulumi-azuread==6.0.1
# via
# -c .hatch/requirements.txt
# hatch.envs.test
-pulumi-random==4.16.6
+pulumi-random==4.16.7
# via
# -c .hatch/requirements.txt
# hatch.envs.test
@@ -330,7 +330,7 @@ pygments==2.18.0
# via
# -c .hatch/requirements.txt
# rich
-pyjwt==2.9.0
+pyjwt==2.10.0
# via
# -c .hatch/requirements.txt
# hatch.envs.test
@@ -340,7 +340,7 @@ pyopenssl==24.2.1
# -c .hatch/requirements.txt
# acme
# josepy
-pyrfc3339==1.1
+pyrfc3339==2.0.1
# via
# -c .hatch/requirements.txt
# acme
@@ -357,7 +357,6 @@ pytz==2024.2
# -c .hatch/requirements.txt
# hatch.envs.test
# acme
- # pyrfc3339
pyyaml==6.0.2
# via
# -c .hatch/requirements.txt
@@ -378,7 +377,7 @@ requests-oauthlib==2.0.0
# via
# -c .hatch/requirements.txt
# msrest
-rich==13.9.2
+rich==13.9.4
# via
# -c .hatch/requirements.txt
# hatch.envs.test
@@ -394,7 +393,7 @@ shellingham==1.5.4
# via
# -c .hatch/requirements.txt
# typer
-simple-acme-dns==3.1.0
+simple-acme-dns==3.2.0
# via
# -c .hatch/requirements.txt
# hatch.envs.test
@@ -404,7 +403,7 @@ six==1.16.0
# azure-core
# pulumi
# python-dateutil
-typer==0.12.5
+typer==0.13.0
# via
# -c .hatch/requirements.txt
# hatch.envs.test
@@ -417,7 +416,9 @@ typing-extensions==4.12.2
# azure-keyvault-keys
# azure-keyvault-secrets
# azure-mgmt-compute
+ # azure-mgmt-dns
# azure-mgmt-keyvault
+ # azure-mgmt-resource
# azure-storage-blob
# azure-storage-file-datalake
# azure-storage-file-share
@@ -429,7 +430,7 @@ urllib3==2.2.3
# via
# -c .hatch/requirements.txt
# requests
-validators==0.28.3
+validators==0.34.0
# via
# -c .hatch/requirements.txt
# simple-acme-dns
diff --git a/.hatch/requirements.txt b/.hatch/requirements.txt
index eb2f71c0e8..b0f7aff926 100644
--- a/.hatch/requirements.txt
+++ b/.hatch/requirements.txt
@@ -2,37 +2,37 @@
# This file is autogenerated by hatch-pip-compile with Python 3.12
#
# - appdirs==1.4.4
-# - azure-core==1.31.0
+# - azure-core==1.32.0
# - azure-identity==1.19.0
-# - azure-keyvault-certificates==4.8.0
-# - azure-keyvault-keys==4.9.0
-# - azure-keyvault-secrets==4.8.0
+# - azure-keyvault-certificates==4.9.0
+# - azure-keyvault-keys==4.10.0
+# - azure-keyvault-secrets==4.9.0
# - azure-mgmt-compute==33.0.0
# - azure-mgmt-containerinstance==10.1.0
-# - azure-mgmt-dns==8.1.0
+# - azure-mgmt-dns==8.2.0
# - azure-mgmt-keyvault==10.3.1
# - azure-mgmt-msi==7.0.0
# - azure-mgmt-rdbms==10.1.0
-# - azure-mgmt-resource==23.1.1
+# - azure-mgmt-resource==23.2.0
# - azure-mgmt-storage==21.2.1
-# - azure-storage-blob==12.23.1
-# - azure-storage-file-datalake==12.17.0
-# - azure-storage-file-share==12.19.0
+# - azure-storage-blob==12.24.0
+# - azure-storage-file-datalake==12.18.0
+# - azure-storage-file-share==12.20.0
# - chevron==0.14.0
-# - cryptography==43.0.1
+# - cryptography==43.0.3
# - fqdn==1.5.1
# - psycopg[binary]==3.1.19
-# - pulumi-azure-native==2.66.0
-# - pulumi-azuread==6.0.0
-# - pulumi-random==4.16.6
-# - pulumi==3.136.1
+# - pulumi-azure-native==2.72.0
+# - pulumi-azuread==6.0.1
+# - pulumi-random==4.16.7
+# - pulumi==3.139.0
# - pydantic==2.9.2
-# - pyjwt[crypto]==2.9.0
+# - pyjwt[crypto]==2.10.0
# - pytz==2024.2
# - pyyaml==6.0.2
-# - rich==13.9.2
-# - simple-acme-dns==3.1.0
-# - typer==0.12.5
+# - rich==13.9.4
+# - simple-acme-dns==3.2.0
+# - typer==0.13.0
# - websocket-client==1.8.0
#
@@ -56,7 +56,7 @@ azure-common==1.1.28
# azure-mgmt-rdbms
# azure-mgmt-resource
# azure-mgmt-storage
-azure-core==1.31.0
+azure-core==1.32.0
# via
# hatch.envs.default
# azure-identity
@@ -70,17 +70,17 @@ azure-core==1.31.0
# msrest
azure-identity==1.19.0
# via hatch.envs.default
-azure-keyvault-certificates==4.8.0
+azure-keyvault-certificates==4.9.0
# via hatch.envs.default
-azure-keyvault-keys==4.9.0
+azure-keyvault-keys==4.10.0
# via hatch.envs.default
-azure-keyvault-secrets==4.8.0
+azure-keyvault-secrets==4.9.0
# via hatch.envs.default
azure-mgmt-compute==33.0.0
# via hatch.envs.default
azure-mgmt-containerinstance==10.1.0
# via hatch.envs.default
-azure-mgmt-core==1.4.0
+azure-mgmt-core==1.5.0
# via
# azure-mgmt-compute
# azure-mgmt-containerinstance
@@ -90,7 +90,7 @@ azure-mgmt-core==1.4.0
# azure-mgmt-rdbms
# azure-mgmt-resource
# azure-mgmt-storage
-azure-mgmt-dns==8.1.0
+azure-mgmt-dns==8.2.0
# via hatch.envs.default
azure-mgmt-keyvault==10.3.1
# via hatch.envs.default
@@ -98,17 +98,17 @@ azure-mgmt-msi==7.0.0
# via hatch.envs.default
azure-mgmt-rdbms==10.1.0
# via hatch.envs.default
-azure-mgmt-resource==23.1.1
+azure-mgmt-resource==23.2.0
# via hatch.envs.default
azure-mgmt-storage==21.2.1
# via hatch.envs.default
-azure-storage-blob==12.23.1
+azure-storage-blob==12.24.0
# via
# hatch.envs.default
# azure-storage-file-datalake
-azure-storage-file-datalake==12.17.0
+azure-storage-file-datalake==12.18.0
# via hatch.envs.default
-azure-storage-file-share==12.19.0
+azure-storage-file-share==12.20.0
# via hatch.envs.default
certifi==2024.8.30
# via
@@ -122,7 +122,7 @@ chevron==0.14.0
# via hatch.envs.default
click==8.1.7
# via typer
-cryptography==43.0.1
+cryptography==43.0.3
# via
# hatch.envs.default
# acme
@@ -134,11 +134,11 @@ cryptography==43.0.1
# msal
# pyjwt
# pyopenssl
-debugpy==1.8.7
+debugpy==1.8.8
# via pulumi
dill==0.3.9
# via pulumi
-dnspython==2.6.1
+dnspython==2.7.0
# via simple-acme-dns
fqdn==1.5.1
# via hatch.envs.default
@@ -192,17 +192,17 @@ psycopg==3.1.19
# via hatch.envs.default
psycopg-binary==3.1.19
# via psycopg
-pulumi==3.136.1
+pulumi==3.139.0
# via
# hatch.envs.default
# pulumi-azure-native
# pulumi-azuread
# pulumi-random
-pulumi-azure-native==2.66.0
+pulumi-azure-native==2.72.0
# via hatch.envs.default
-pulumi-azuread==6.0.0
+pulumi-azuread==6.0.1
# via hatch.envs.default
-pulumi-random==4.16.6
+pulumi-random==4.16.7
# via hatch.envs.default
pycparser==2.22
# via cffi
@@ -212,7 +212,7 @@ pydantic-core==2.23.4
# via pydantic
pygments==2.18.0
# via rich
-pyjwt==2.9.0
+pyjwt==2.10.0
# via
# hatch.envs.default
# msal
@@ -220,13 +220,12 @@ pyopenssl==24.2.1
# via
# acme
# josepy
-pyrfc3339==1.1
+pyrfc3339==2.0.1
# via acme
pytz==2024.2
# via
# hatch.envs.default
# acme
- # pyrfc3339
pyyaml==6.0.2
# via
# hatch.envs.default
@@ -240,7 +239,7 @@ requests==2.32.3
# requests-oauthlib
requests-oauthlib==2.0.0
# via msrest
-rich==13.9.2
+rich==13.9.4
# via
# hatch.envs.default
# typer
@@ -252,13 +251,13 @@ semver==2.13.0
# pulumi-random
shellingham==1.5.4
# via typer
-simple-acme-dns==3.1.0
+simple-acme-dns==3.2.0
# via hatch.envs.default
six==1.16.0
# via
# azure-core
# pulumi
-typer==0.12.5
+typer==0.13.0
# via hatch.envs.default
typing-extensions==4.12.2
# via
@@ -268,7 +267,9 @@ typing-extensions==4.12.2
# azure-keyvault-keys
# azure-keyvault-secrets
# azure-mgmt-compute
+ # azure-mgmt-dns
# azure-mgmt-keyvault
+ # azure-mgmt-resource
# azure-storage-blob
# azure-storage-file-datalake
# azure-storage-file-share
@@ -278,7 +279,7 @@ typing-extensions==4.12.2
# typer
urllib3==2.2.3
# via requests
-validators==0.28.3
+validators==0.34.0
# via simple-acme-dns
websocket-client==1.8.0
# via hatch.envs.default
diff --git a/README.md b/README.md
index 622f76facd..95bdbac05e 100644
--- a/README.md
+++ b/README.md
@@ -46,69 +46,69 @@ See our [Code of Conduct](CODE_OF_CONDUCT.md) and our [Contributor Guide](CONTRI
![Alvaro Cabrejas Egea](https://avatars.githubusercontent.com/u/22940095?v=4?s=100) Alvaro Cabrejas Egea ๐ป ๐ |
+ ![Arielle Bennett](https://avatars.githubusercontent.com/u/74651964?v=4?s=100) Arielle Bennett ๐ ๐ค ๐ |
+ ![Benjamin Walden](https://avatars.githubusercontent.com/u/54804128?v=4?s=100) Benjamin Walden ๐ ๐ค ๐ ๐ ๐ |
+ ![Brett Todd](https://avatars.githubusercontent.com/u/62715658?v=4?s=100) Brett Todd ๐ป ๐ค |
![Callum Mole](https://avatars.githubusercontent.com/u/22677759?v=4?s=100) Callum Mole ๐ ๐ป |
- ![Carlos Gavidia-Calderon](https://avatars.githubusercontent.com/u/1616531?v=4?s=100) Carlos Gavidia-Calderon ๐ |
+ ![Carlos Gavidia-Calderon](https://avatars.githubusercontent.com/u/1616531?v=4?s=100) Carlos Gavidia-Calderon ๐ ๐ค ๐ |
![Catalina Vallejos](https://avatars.githubusercontent.com/u/7511093?v=4?s=100) Catalina Vallejos ๐ |
- ![Christopher Edsall](https://avatars.githubusercontent.com/u/1021204?v=4?s=100) Christopher Edsall ๐ป ๐ ๐ |
- ![DDelbarre](https://avatars.githubusercontent.com/u/108824056?v=4?s=100) DDelbarre ๐ |
- ![Daniel](https://avatars.githubusercontent.com/u/49038294?v=4?s=100) Daniel ๐ป ๐ |
+ ![Catherine Lawrence](https://avatars.githubusercontent.com/u/38755168?v=4?s=100) Catherine Lawrence ๐ ๐ ๐ ๐ค |
+ ![Christopher Edsall](https://avatars.githubusercontent.com/u/1021204?v=4?s=100) Christopher Edsall ๐ป ๐ ๐ |
+ ![Daniel Allen](https://avatars.githubusercontent.com/u/49038294?v=4?s=100) Daniel Allen ๐ป ๐ |
+ ![Daniel Delbarre](https://avatars.githubusercontent.com/u/108824056?v=4?s=100) Daniel Delbarre ๐ |
![David Beavan](https://avatars.githubusercontent.com/u/6524799?v=4?s=100) David Beavan ๐ ๐ |
- ![David Salvador Jasin](https://avatars.githubusercontent.com/u/57944311?v=4?s=100) David Salvador Jasin ๐ ๐ |
+ ![David Salvador Jasin](https://avatars.githubusercontent.com/u/57944311?v=4?s=100) David Salvador Jasin ๐ ๐ ๐ค |
+ ![David Sarmiento Perez](https://avatars.githubusercontent.com/u/118986872?v=4?s=100) David Sarmiento Perez ๐ ๐ ๐ ๐ค ๐ ๐ฃ ๐ข |
+
+
![Diego Arenas](https://avatars.githubusercontent.com/u/7409896?v=4?s=100) Diego Arenas ๐ป ๐ค ๐ |
![Ed Chalstrey](https://avatars.githubusercontent.com/u/5486164?v=4?s=100) Ed Chalstrey ๐ป ๐ ๐ ๐ค ๐ ๐ โ ๏ธ |
![Evelina Gabasova](https://avatars.githubusercontent.com/u/5541162?v=4?s=100) Evelina Gabasova ๐ |
![Federico Nanni](https://avatars.githubusercontent.com/u/8415204?v=4?s=100) Federico Nanni ๐ป ๐ ๐ ๐ค |
![Franz Kirรกly](https://avatars.githubusercontent.com/u/7985502?v=4?s=100) Franz Kirรกly ๐ |
+ ![George Holmes](https://avatars.githubusercontent.com/u/62715301?v=4?s=100) George Holmes ๐ป ๐ค |
+ ![Guillaume Noell](https://avatars.githubusercontent.com/u/50482094?v=4?s=100) Guillaume Noell ๐ ๐ ๐ค |
- ![Guillaume Noell](https://avatars.githubusercontent.com/u/50482094?v=4?s=100) Guillaume Noell ๐ ๐ ๐ค |
- ![Helen D Little](https://avatars.githubusercontent.com/u/46891265?v=4?s=100) Helen D Little ๐ ๐ |
+ ![Hari Sood](https://avatars.githubusercontent.com/u/67151373?v=4?s=100) Hari Sood ๐ ๐ ๐ค ๐ ๐ ๐ ๐ฃ ๐ฌ ๐ข ๐ก๏ธ ๐ |
+ ![Helen Duncan Little](https://avatars.githubusercontent.com/u/46891265?v=4?s=100) Helen Duncan Little ๐ ๐ ๐ค |
![Helen Sherwood-Taylor](https://avatars.githubusercontent.com/u/217966?v=4?s=100) Helen Sherwood-Taylor ๐ค ๐ |
+ ![Ian Carter](https://avatars.githubusercontent.com/u/34555297?v=4?s=100) Ian Carter ๐ป ๐ |
![Jack Roberts](https://avatars.githubusercontent.com/u/16308271?v=4?s=100) Jack Roberts ๐ป ๐ |
![James Cunningham](https://avatars.githubusercontent.com/u/150765?v=4?s=100) James Cunningham ๐ป ๐ ๐ ๐ค ๐ |
![James Geddes](https://avatars.githubusercontent.com/u/1172905?v=4?s=100) James Geddes ๐ |
- ![James Hetherington](https://avatars.githubusercontent.com/u/55009?v=4?s=100) James Hetherington ๐ ๐ ๐ค ๐ ๐ ๐ฃ ๐ข ๐ |
+ ![James Hetherington](https://avatars.githubusercontent.com/u/55009?v=4?s=100) James Hetherington ๐ ๐ ๐ค ๐ ๐ ๐ฃ ๐ข ๐ |
![James Robinson](https://avatars.githubusercontent.com/u/3502751?v=4?s=100) James Robinson ๐ป ๐ ๐ ๐ ๐ค ๐ ๐ ๐ ๐ ๐ฃ ๐ฌ ๐ ๐ก๏ธ โ ๏ธ ๐ข |
- ![Jim Madge](https://avatars.githubusercontent.com/u/23616154?v=4?s=100) Jim Madge ๐ป ๐ ๐ ๐ค ๐ ๐ ๐ ๐ ๐ฃ ๐ฌ ๐ ๐ก๏ธ โ ๏ธ |
+ ![Jim Madge](https://avatars.githubusercontent.com/u/23616154?v=4?s=100) Jim Madge ๐ป ๐ ๐ ๐ค ๐ ๐ ๐ ๐ ๐ฃ ๐ฌ ๐ ๐ก๏ธ โ ๏ธ ๐ข |
![Josh Everett](https://avatars.githubusercontent.com/u/17052866?v=4?s=100) Josh Everett ๐ |
- ![Jules M](https://avatars.githubusercontent.com/u/40864686?v=4?s=100) Jules M ๐ ๐ค ๐ ๐ |
+ ![Jules Manser](https://avatars.githubusercontent.com/u/40864686?v=4?s=100) Jules Manser ๐ ๐ค ๐ ๐ |
+ ![Kevin Xu](https://avatars.githubusercontent.com/u/48526846?v=4?s=100) Kevin Xu ๐ ๐ค ๐ก๏ธ |
![Kirstie Whitaker](https://avatars.githubusercontent.com/u/3626306?v=4?s=100) Kirstie Whitaker ๐ ๐ ๐ ๐ค ๐ ๐ ๐ ๐ฃ ๐ข ๐ |
+
+
![Martin O'Reilly](https://avatars.githubusercontent.com/u/21147592?v=4?s=100) Martin O'Reilly ๐ป ๐ ๐ ๐ ๐ค ๐ ๐ ๐ ๐ ๐ฃ ๐ฌ ๐ ๐ก๏ธ โ ๏ธ ๐ข |
![Matt Craddock](https://avatars.githubusercontent.com/u/5796417?v=4?s=100) Matt Craddock ๐ป ๐ ๐ ๐ค ๐ ๐ ๐ฃ ๐ฌ ๐ ๐ก๏ธ โ ๏ธ |
+ ![Matt Westby](https://avatars.githubusercontent.com/u/91054185?v=4?s=100) Matt Westby ๐ |
+ ![Miguel Morin](https://avatars.githubusercontent.com/u/32396311?v=4?s=100) Miguel Morin ๐ป ๐ ๐ค โ ๏ธ |
+ ![Oliver Forrest](https://avatars.githubusercontent.com/u/49275282?v=4?s=100) Oliver Forrest ๐ ๐ค ๐ ๐ฃ ๐ |
+ ![Oscar T Giles](https://avatars.githubusercontent.com/u/12784013?v=4?s=100) Oscar T Giles ๐ป ๐ ๐ค |
+ ![Rachel Winstanley](https://avatars.githubusercontent.com/u/56362072?v=4?s=100) Rachel Winstanley ๐ ๐ค ๐ ๐ก๏ธ |
- ![Oscar T Giles](https://avatars.githubusercontent.com/u/12784013?v=4?s=100) Oscar T Giles ๐ป ๐ ๐ค |
![Radka Jersakova](https://avatars.githubusercontent.com/u/29207091?v=4?s=100) Radka Jersakova ๐ |
![Rob Clarke](https://avatars.githubusercontent.com/u/29575619?v=4?s=100) Rob Clarke ๐ค ๐ ๐ป ๐ ๐ |
+ ![Sebastian Vollmer](https://avatars.githubusercontent.com/u/12613127?v=4?s=100) Sebastian Vollmer ๐ ๐ ๐ค ๐ |
![Steven Carlysle-Davies](https://avatars.githubusercontent.com/u/5108635?v=4?s=100) Steven Carlysle-Davies ๐ป ๐ ๐ค |
![Tim Hobson](https://avatars.githubusercontent.com/u/26117394?v=4?s=100) Tim Hobson ๐ป ๐ ๐ ๐ค |
![Tom Doel](https://avatars.githubusercontent.com/u/4216900?v=4?s=100) Tom Doel ๐ป ๐ ๐ ๐ค ๐ |
![Tomas Lazauskas](https://avatars.githubusercontent.com/u/12182911?v=4?s=100) Tomas Lazauskas ๐ป ๐ ๐ ๐ค |
- ![arielle-bennett](https://avatars.githubusercontent.com/u/74651964?v=4?s=100) arielle-bennett ๐ ๐ค ๐ |
- ![bw-faststream](https://avatars.githubusercontent.com/u/54804128?v=4?s=100) bw-faststream ๐ ๐ค ๐ ๐ ๐ |
- ![cathiest](https://avatars.githubusercontent.com/u/38755168?v=4?s=100) cathiest ๐ ๐ ๐ ๐ค |
- ![davsarper](https://avatars.githubusercontent.com/u/118986872?v=4?s=100) davsarper ๐ ๐ ๐ ๐ค ๐ ๐ฃ ๐ข |
- ![ens-brett-todd](https://avatars.githubusercontent.com/u/62715658?v=4?s=100) ens-brett-todd ๐ป ๐ค |
- ![ens-george-holmes](https://avatars.githubusercontent.com/u/62715301?v=4?s=100) ens-george-holmes ๐ป ๐ค |
- ![getcarter21](https://avatars.githubusercontent.com/u/34555297?v=4?s=100) getcarter21 ๐ป ๐ |
-
-
- ![harisood](https://avatars.githubusercontent.com/u/67151373?v=4?s=100) harisood ๐ ๐ ๐ค ๐ ๐ ๐ ๐ฃ ๐ฌ ๐ข ๐ก๏ธ ๐ |
- ![kevinxufs](https://avatars.githubusercontent.com/u/48526846?v=4?s=100) kevinxufs ๐ ๐ค ๐ก๏ธ |
- ![mattwestby](https://avatars.githubusercontent.com/u/91054185?v=4?s=100) mattwestby ๐ |
- ![miguelmorin](https://avatars.githubusercontent.com/u/32396311?v=4?s=100) miguelmorin ๐ป ๐ ๐ค โ ๏ธ |
- ![oforrest](https://avatars.githubusercontent.com/u/49275282?v=4?s=100) oforrest ๐ ๐ค ๐ ๐ฃ ๐ |
- ![rwinstanley1](https://avatars.githubusercontent.com/u/56362072?v=4?s=100) rwinstanley1 ๐ ๐ค ๐ ๐ก๏ธ |
- ![vollmersj](https://avatars.githubusercontent.com/u/12613127?v=4?s=100) vollmersj ๐ ๐ ๐ค ๐ |
-
-
- ![warwick26](https://avatars.githubusercontent.com/u/33690673?v=4?s=100) warwick26 ๐ป ๐ค |
+ ![Warwick Wood](https://avatars.githubusercontent.com/u/33690673?v=4?s=100) Warwick Wood ๐ป ๐ค |
diff --git a/SECURITY.md b/SECURITY.md
index db056c976b..c81368a94e 100644
--- a/SECURITY.md
+++ b/SECURITY.md
@@ -7,8 +7,8 @@ All organisations using an earlier version in production should update to the la
| Version | Supported |
| --------------------------------------------------------------------------------------- | ------------------ |
-| [5.0.1](https://github.com/alan-turing-institute/data-safe-haven/releases/tag/v5.0.1) | :white_check_mark: |
-| < 5.0.1 | :x: |
+| [5.1.0](https://github.com/alan-turing-institute/data-safe-haven/releases/tag/v5.1.0) | :white_check_mark: |
+| < 5.1.0 | :x: |
## Reporting a Vulnerability
diff --git a/data_safe_haven/commands/config.py b/data_safe_haven/commands/config.py
index a774868516..4d096f45ac 100644
--- a/data_safe_haven/commands/config.py
+++ b/data_safe_haven/commands/config.py
@@ -107,7 +107,10 @@ def available() -> None:
@config_command_group.command()
def show(
- name: Annotated[str, typer.Argument(help="Name of SRE to show")],
+ name: Annotated[
+ str,
+ typer.Argument(help="Name of SRE to show"),
+ ],
file: Annotated[
Optional[Path], # noqa: UP007
typer.Option(help="File path to write configuration template to."),
diff --git a/data_safe_haven/commands/pulumi.py b/data_safe_haven/commands/pulumi.py
index 7ad9506f0b..13f0795de0 100644
--- a/data_safe_haven/commands/pulumi.py
+++ b/data_safe_haven/commands/pulumi.py
@@ -6,8 +6,7 @@
import typer
from data_safe_haven import console
-from data_safe_haven.config import ContextManager, DSHPulumiConfig, SHMConfig, SREConfig
-from data_safe_haven.external import GraphApi
+from data_safe_haven.config import ContextManager, DSHPulumiConfig, SREConfig
from data_safe_haven.infrastructure import SREProjectManager
pulumi_command_group = typer.Typer()
@@ -33,24 +32,12 @@ def run(
"""Run arbitrary Pulumi commands in a DSH project"""
context = ContextManager.from_file().assert_context()
pulumi_config = DSHPulumiConfig.from_remote(context)
- shm_config = SHMConfig.from_remote(context)
sre_config = SREConfig.from_remote_by_name(context, sre_name)
- graph_api = GraphApi.from_scopes(
- scopes=[
- "Application.ReadWrite.All",
- "AppRoleAssignment.ReadWrite.All",
- "Directory.ReadWrite.All",
- "Group.ReadWrite.All",
- ],
- tenant_id=shm_config.shm.entra_tenant_id,
- )
-
project = SREProjectManager(
context=context,
config=sre_config,
pulumi_config=pulumi_config,
- graph_api_token=graph_api.token,
)
stdout = project.run_pulumi_command(command)
diff --git a/data_safe_haven/commands/shm.py b/data_safe_haven/commands/shm.py
index 522609b8ff..c25012d46d 100644
--- a/data_safe_haven/commands/shm.py
+++ b/data_safe_haven/commands/shm.py
@@ -140,9 +140,21 @@ def teardown() -> None:
# Teardown Data Safe Haven SHM infrastructure.
try:
- config = SHMConfig.from_remote(context)
- shm_infra = ImperativeSHM(context, config)
- shm_infra.teardown()
+ if SHMConfig.remote_exists(context):
+ config = SHMConfig.from_remote(context)
+ shm_infra = ImperativeSHM(context, config)
+ console.print(
+ "Tearing down the Safe Haven Management environment will permanently delete all associated resources, including remotely stored configurations."
+ )
+ if not console.confirm(
+ "Do you wish to continue tearing down the SHM?", default_to_yes=False
+ ):
+ console.print("SHM teardown cancelled by user.")
+ raise typer.Exit(0)
+ shm_infra.teardown()
+ else:
+ logger.critical(f"No deployed SHM found for context [green]{context.name}.")
+ raise typer.Exit(1)
except DataSafeHavenError as exc:
logger.critical("Could not teardown Safe Haven Management environment.")
raise typer.Exit(1) from exc
diff --git a/data_safe_haven/commands/sre.py b/data_safe_haven/commands/sre.py
index f03f0cc53e..8c3e0b5cdc 100644
--- a/data_safe_haven/commands/sre.py
+++ b/data_safe_haven/commands/sre.py
@@ -4,6 +4,7 @@
import typer
+from data_safe_haven import console
from data_safe_haven.config import ContextManager, DSHPulumiConfig, SHMConfig, SREConfig
from data_safe_haven.exceptions import DataSafeHavenConfigError, DataSafeHavenError
from data_safe_haven.external import AzureSdk, GraphApi
@@ -66,7 +67,6 @@ def deploy(
config=sre_config,
pulumi_config=pulumi_config,
create_project=True,
- graph_api_token=graph_api.token,
)
# Set Azure options
stack.add_option(
@@ -99,7 +99,9 @@ def deploy(
if not application:
msg = f"No Entra application '{context.entra_application_name}' was found. Please redeploy your SHM."
raise DataSafeHavenConfigError(msg)
- stack.add_option("azuread:clientId", application.get("appId", ""), replace=True)
+ stack.add_option(
+ "azuread:clientId", application.get("appId", ""), replace=False
+ )
if not context.entra_application_secret:
msg = f"No Entra application secret '{context.entra_application_secret_name}' was found. Please redeploy your SHM."
raise DataSafeHavenConfigError(msg)
@@ -107,7 +109,7 @@ def deploy(
"azuread:clientSecret", context.entra_application_secret, replace=True
)
stack.add_option(
- "azuread:tenantId", shm_config.shm.entra_tenant_id, replace=True
+ "azuread:tenantId", shm_config.shm.entra_tenant_id, replace=False
)
# Load SHM outputs
stack.add_option(
@@ -153,7 +155,6 @@ def deploy(
# Provision SRE with anything that could not be done in Pulumi
manager = SREProvisioningManager(
- graph_api_token=graph_api.token,
location=sre_config.azure.location,
sre_name=sre_config.name,
sre_stack=stack,
@@ -183,20 +184,24 @@ def teardown(
"""Tear down a deployed a Secure Research Environment."""
logger = get_logger()
try:
- # Load context and SHM config
+ # Load context
context = ContextManager.from_file().assert_context()
- shm_config = SHMConfig.from_remote(context)
-
- # Load GraphAPI as this may require user-interaction
- graph_api = GraphApi.from_scopes(
- scopes=["Application.ReadWrite.All", "Group.ReadWrite.All"],
- tenant_id=shm_config.shm.entra_tenant_id,
- )
# Load Pulumi and SRE configs
pulumi_config = DSHPulumiConfig.from_remote(context)
sre_config = SREConfig.from_remote_by_name(context, name)
+ console.print(
+ "Tearing down the Secure Research Environment will permanently delete all associated resources, "
+ "including all data stored in the environment.\n"
+ "Ensure that any desired outputs have been extracted before continuing."
+ )
+ if not console.confirm(
+ "Do you wish to continue tearing down the SRE?", default_to_yes=False
+ ):
+ console.print("SRE teardown cancelled by user.")
+ raise typer.Exit(0)
+
# Check whether current IP address is authorised to take administrator actions
if not ip_address_in_list(sre_config.sre.admin_ip_addresses):
logger.warning(
@@ -212,7 +217,6 @@ def teardown(
context=context,
config=sre_config,
pulumi_config=pulumi_config,
- graph_api_token=graph_api.token,
create_project=True,
)
stack.teardown(force=force)
diff --git a/data_safe_haven/config/config_sections.py b/data_safe_haven/config/config_sections.py
index 35b9570a7e..62bfec0833 100644
--- a/data_safe_haven/config/config_sections.py
+++ b/data_safe_haven/config/config_sections.py
@@ -57,7 +57,7 @@ class ConfigSectionSRE(BaseModel, validate_assignment=True):
admin_email_address: EmailAddress
admin_ip_addresses: list[IpAddress] = []
databases: UniqueList[DatabaseSystem] = []
- data_provider_ip_addresses: list[IpAddress] = []
+ data_provider_ip_addresses: list[IpAddress] | AzureServiceTag = []
remote_desktop: ConfigSubsectionRemoteDesktopOpts
research_user_ip_addresses: list[IpAddress] | AzureServiceTag = []
storage_quota_gb: ConfigSubsectionStorageQuotaGB
@@ -67,8 +67,6 @@ class ConfigSectionSRE(BaseModel, validate_assignment=True):
@field_validator(
"admin_ip_addresses",
- "data_provider_ip_addresses",
- # "research_user_ip_addresses",
mode="after",
)
@classmethod
@@ -81,6 +79,7 @@ def ensure_non_overlapping(cls, v: list[IpAddress]) -> list[IpAddress]:
return v
@field_validator(
+ "data_provider_ip_addresses",
"research_user_ip_addresses",
mode="after",
)
diff --git a/data_safe_haven/config/sre_config.py b/data_safe_haven/config/sre_config.py
index 9fba89e12f..53adb673e0 100644
--- a/data_safe_haven/config/sre_config.py
+++ b/data_safe_haven/config/sre_config.py
@@ -4,9 +4,8 @@
from typing import ClassVar, Self
-from data_safe_haven.functions import json_safe
from data_safe_haven.serialisers import AzureSerialisableModel, ContextBase
-from data_safe_haven.types import SafeString, SoftwarePackageCategory
+from data_safe_haven.types import SafeSreName, SoftwarePackageCategory
from .config_sections import (
ConfigSectionAzure,
@@ -18,8 +17,8 @@
def sre_config_name(sre_name: str) -> str:
- """Construct a safe YAML filename given an input SRE name."""
- return f"sre-{json_safe(sre_name)}.yaml"
+ """Construct a YAML filename given an input SRE name."""
+ return f"sre-{sre_name}.yaml"
class SREConfig(AzureSerialisableModel):
@@ -31,7 +30,7 @@ class SREConfig(AzureSerialisableModel):
azure: ConfigSectionAzure
description: str
dockerhub: ConfigSectionDockerHub
- name: SafeString
+ name: SafeSreName
sre: ConfigSectionSRE
@property
diff --git a/data_safe_haven/external/api/graph_api.py b/data_safe_haven/external/api/graph_api.py
index 66cf139a65..7d3b088672 100644
--- a/data_safe_haven/external/api/graph_api.py
+++ b/data_safe_haven/external/api/graph_api.py
@@ -17,6 +17,11 @@
DataSafeHavenValueError,
)
from data_safe_haven.logging import get_logger, get_null_logger
+from data_safe_haven.types import (
+ EntraApplicationId,
+ EntraAppPermissionType,
+ EntraSignInAudienceType,
+)
from .credentials import DeferredCredential, GraphApiCredential
@@ -24,9 +29,6 @@
class GraphApi:
"""Interface to the Microsoft Graph REST API"""
- application_ids: ClassVar[dict[str, str]] = {
- "Microsoft Graph": "00000003-0000-0000-c000-000000000000",
- }
role_template_ids: ClassVar[dict[str, str]] = {
"Global Administrator": "62e90394-69f5-4237-9190-012177145e10"
}
@@ -34,6 +36,7 @@ class GraphApi:
"Application.ReadWrite.All": "1bfefb4e-e0b5-418b-a88f-73c46d2cc8e9",
"AppRoleAssignment.ReadWrite.All": "06b708a9-e830-4db3-a914-8e69da51d44f",
"Directory.Read.All": "7ab1d382-f21e-4acd-a863-ba3e13f7da61",
+ "Directory.ReadWrite.All": "19dbc75e-c2e2-444c-a770-ec69d8559fc7",
"Domain.Read.All": "dbb9058a-0e50-45d7-ae91-66909b5d4664",
"Group.Read.All": "5b567255-7703-4780-807c-7be8301ae99b",
"Group.ReadWrite.All": "62a82d76-70ea-41e2-9197-370581804d09",
@@ -192,7 +195,6 @@ def create_application(
if not request_json:
request_json = {
"displayName": application_name,
- "signInAudience": "AzureADMyOrg",
"passwordCredentials": [],
"publicClient": {
"redirectUris": [
@@ -200,25 +202,26 @@ def create_application(
"urn:ietf:wg:oauth:2.0:oob",
]
},
+ "signInAudience": EntraSignInAudienceType.THIS_TENANT.value,
}
# Add scopes if there are any
scopes = [
{
"id": self.uuid_application[application_scope],
- "type": "Role", # 'Role' is the type for application permissions
+ "type": EntraAppPermissionType.APPLICATION.value,
}
for application_scope in application_scopes
] + [
{
"id": self.uuid_delegated[delegated_scope],
- "type": "Scope", # 'Scope' is the type for delegated permissions
+ "type": EntraAppPermissionType.DELEGATED.value,
}
for delegated_scope in delegated_scopes
]
if scopes:
request_json["requiredResourceAccess"] = [
{
- "resourceAppId": self.application_ids["Microsoft Graph"],
+ "resourceAppId": EntraApplicationId.MICROSOFT_GRAPH.value,
"resourceAccess": scopes,
}
]
@@ -589,9 +592,9 @@ def grant_application_role_permissions(
f"Assigning application role '[green]{application_role_name}[/]' to '{application_name}'...",
)
request_json = {
+ "appRoleId": app_role_id,
"principalId": application_sp["id"],
"resourceId": microsoft_graph_sp["id"],
- "appRoleId": app_role_id,
}
self.http_post(
f"{self.base_endpoint}/servicePrincipals/{microsoft_graph_sp['id']}/appRoleAssignments",
diff --git a/data_safe_haven/functions/__init__.py b/data_safe_haven/functions/__init__.py
index e11b326135..4a83a76463 100644
--- a/data_safe_haven/functions/__init__.py
+++ b/data_safe_haven/functions/__init__.py
@@ -3,7 +3,6 @@
alphanumeric,
b64encode,
get_key_vault_name,
- json_safe,
next_occurrence,
password,
replace_separators,
@@ -18,7 +17,6 @@
"current_ip_address",
"get_key_vault_name",
"ip_address_in_list",
- "json_safe",
"next_occurrence",
"password",
"replace_separators",
diff --git a/data_safe_haven/functions/strings.py b/data_safe_haven/functions/strings.py
index 0d5b06b33e..bf229c4f5e 100644
--- a/data_safe_haven/functions/strings.py
+++ b/data_safe_haven/functions/strings.py
@@ -27,11 +27,6 @@ def get_key_vault_name(stack_name: str) -> str:
return f"{''.join(truncate_tokens(stack_name.split('-'), 17))}secrets"
-def json_safe(input_string: str) -> str:
- """Construct a JSON-safe version of an input string"""
- return alphanumeric(input_string).lower()
-
-
def next_occurrence(
hour: int, minute: int, timezone: str, *, time_format: str = "iso"
) -> str:
diff --git a/data_safe_haven/infrastructure/components/__init__.py b/data_safe_haven/infrastructure/components/__init__.py
index 7531491bf8..2b3dd67e7a 100644
--- a/data_safe_haven/infrastructure/components/__init__.py
+++ b/data_safe_haven/infrastructure/components/__init__.py
@@ -1,4 +1,7 @@
from .composite import (
+ EntraApplicationComponent,
+ EntraDesktopApplicationProps,
+ EntraWebApplicationProps,
LinuxVMComponentProps,
LocalDnsRecordComponent,
LocalDnsRecordProps,
@@ -13,8 +16,6 @@
from .dynamic import (
BlobContainerAcl,
BlobContainerAclProps,
- EntraApplication,
- EntraApplicationProps,
FileShareFile,
FileShareFileProps,
SSLCertificate,
@@ -28,8 +29,9 @@
__all__ = [
"BlobContainerAcl",
"BlobContainerAclProps",
- "EntraApplication",
- "EntraApplicationProps",
+ "EntraApplicationComponent",
+ "EntraDesktopApplicationProps",
+ "EntraWebApplicationProps",
"FileShareFile",
"FileShareFileProps",
"LinuxVMComponentProps",
diff --git a/data_safe_haven/infrastructure/components/composite/__init__.py b/data_safe_haven/infrastructure/components/composite/__init__.py
index e4254a50ed..bc09bc18a8 100644
--- a/data_safe_haven/infrastructure/components/composite/__init__.py
+++ b/data_safe_haven/infrastructure/components/composite/__init__.py
@@ -1,3 +1,8 @@
+from .entra_application import (
+ EntraApplicationComponent,
+ EntraDesktopApplicationProps,
+ EntraWebApplicationProps,
+)
from .local_dns_record import LocalDnsRecordComponent, LocalDnsRecordProps
from .microsoft_sql_database import (
MicrosoftSQLDatabaseComponent,
@@ -8,6 +13,9 @@
from .virtual_machine import LinuxVMComponentProps, VMComponent
__all__ = [
+ "EntraApplicationComponent",
+ "EntraDesktopApplicationProps",
+ "EntraWebApplicationProps",
"LinuxVMComponentProps",
"LocalDnsRecordComponent",
"LocalDnsRecordProps",
diff --git a/data_safe_haven/infrastructure/components/composite/entra_application.py b/data_safe_haven/infrastructure/components/composite/entra_application.py
new file mode 100644
index 0000000000..e5bcff949b
--- /dev/null
+++ b/data_safe_haven/infrastructure/components/composite/entra_application.py
@@ -0,0 +1,163 @@
+"""Pulumi component for an Entra Application resource"""
+
+from collections.abc import Mapping
+from typing import Any
+
+import pulumi_azuread as entra
+from pulumi import ComponentResource, Input, Output, ResourceOptions
+
+from data_safe_haven.functions import replace_separators
+from data_safe_haven.types import EntraAppPermissionType, EntraSignInAudienceType
+
+
+class EntraApplicationProps:
+ """Properties for EntraApplicationComponent"""
+
+ def __init__(
+ self,
+ application_name: Input[str],
+ application_permissions: list[tuple[EntraAppPermissionType, str]],
+ msgraph_service_principal: Input[entra.ServicePrincipal],
+ application_kwargs: Mapping[str, Any],
+ ) -> None:
+ self.application_name = application_name
+ self.application_permissions = application_permissions
+ self.msgraph_client_id = msgraph_service_principal.client_id
+ self.msgraph_object_id = msgraph_service_principal.object_id
+ self.application_kwargs = application_kwargs
+
+ # Construct a mapping of all the available application permissions
+ self.msgraph_permissions: Output[dict[str, Mapping[str, str]]] = Output.all(
+ application=msgraph_service_principal.app_role_ids,
+ delegated=msgraph_service_principal.oauth2_permission_scope_ids,
+ ).apply(
+ lambda kwargs: {
+ EntraAppPermissionType.APPLICATION: kwargs["application"],
+ EntraAppPermissionType.DELEGATED: kwargs["delegated"],
+ }
+ )
+
+
+class EntraDesktopApplicationProps(EntraApplicationProps):
+ """
+ Properties for a desktop EntraApplicationComponent.
+ See https://learn.microsoft.com/en-us/entra/identity-platform/msal-client-applications)
+ """
+
+ def __init__(
+ self,
+ application_name: Input[str],
+ application_permissions: list[tuple[EntraAppPermissionType, str]],
+ msgraph_service_principal: Input[entra.ServicePrincipal],
+ ):
+ super().__init__(
+ application_name=application_name,
+ application_kwargs={
+ "public_client": entra.ApplicationPublicClientArgs(
+ redirect_uris=["urn:ietf:wg:oauth:2.0:oob"]
+ )
+ },
+ application_permissions=application_permissions,
+ msgraph_service_principal=msgraph_service_principal,
+ )
+
+
+class EntraWebApplicationProps(EntraApplicationProps):
+ """
+ Properties for a web EntraApplicationComponent.
+ See https://learn.microsoft.com/en-us/entra/identity-platform/msal-client-applications)
+ """
+
+ def __init__(
+ self,
+ application_name: Input[str],
+ application_permissions: list[tuple[EntraAppPermissionType, str]],
+ msgraph_service_principal: Input[entra.ServicePrincipal],
+ redirect_url: Input[str],
+ ):
+ super().__init__(
+ application_name=application_name,
+ application_kwargs={
+ "web": entra.ApplicationWebArgs(
+ redirect_uris=[redirect_url],
+ implicit_grant=entra.ApplicationWebImplicitGrantArgs(
+ id_token_issuance_enabled=True,
+ ),
+ )
+ },
+ application_permissions=application_permissions,
+ msgraph_service_principal=msgraph_service_principal,
+ )
+
+
+class EntraApplicationComponent(ComponentResource):
+ """Deploy an Entra application with Pulumi"""
+
+ def __init__(
+ self,
+ name: str,
+ props: EntraApplicationProps,
+ opts: ResourceOptions | None = None,
+ ) -> None:
+ super().__init__("dsh:common:EntraApplicationComponent", name, {}, opts)
+
+ # Create the application
+ self.application = entra.Application(
+ f"{self._name}_application",
+ display_name=props.application_name,
+ prevent_duplicate_names=True,
+ required_resource_accesses=(
+ [
+ entra.ApplicationRequiredResourceAccessArgs(
+ resource_accesses=[
+ entra.ApplicationRequiredResourceAccessResourceAccessArgs(
+ id=props.msgraph_permissions[permission_type][
+ permission
+ ],
+ type=permission_type.value,
+ )
+ for permission_type, permission in props.application_permissions
+ ],
+ resource_app_id=props.msgraph_client_id,
+ )
+ ]
+ if props.application_permissions
+ else []
+ ),
+ sign_in_audience=EntraSignInAudienceType.THIS_TENANT.value,
+ **props.application_kwargs,
+ )
+
+ # Get the service principal for this application
+ self.application_service_principal = entra.ServicePrincipal(
+ f"{self._name}_application_service_principal",
+ client_id=self.application.client_id,
+ )
+
+ # Grant admin approval for requested application permissions
+ [
+ entra.AppRoleAssignment(
+ replace_separators(
+ f"{self._name}_application_role_grant_{permission_type.value}_{permission}",
+ "_",
+ ).lower(),
+ app_role_id=props.msgraph_permissions[permission_type][permission],
+ principal_object_id=self.application_service_principal.object_id,
+ resource_object_id=props.msgraph_object_id,
+ )
+ for permission_type, permission in props.application_permissions
+ if permission_type == EntraAppPermissionType.APPLICATION
+ ]
+ [
+ entra.ServicePrincipalDelegatedPermissionGrant(
+ replace_separators(
+ f"{self._name}_application_delegated_grant_{permission_type.value}_{permission}",
+ "_",
+ ).lower(),
+ claim_values=[permission],
+ resource_service_principal_object_id=props.msgraph_object_id,
+ service_principal_object_id=self.application_service_principal.object_id,
+ )
+ for permission_type, permission in props.application_permissions
+ if permission_type == EntraAppPermissionType.DELEGATED
+ ]
diff --git a/data_safe_haven/infrastructure/components/composite/virtual_machine.py b/data_safe_haven/infrastructure/components/composite/virtual_machine.py
index 71fed08246..9471991a4a 100644
--- a/data_safe_haven/infrastructure/components/composite/virtual_machine.py
+++ b/data_safe_haven/infrastructure/components/composite/virtual_machine.py
@@ -199,6 +199,9 @@ def __init__(
),
),
vm_name=props.vm_name,
+ identity=compute.VirtualMachineIdentityArgs(
+ type=compute.ResourceIdentityType.SYSTEM_ASSIGNED,
+ ),
opts=ResourceOptions.merge(
child_opts,
ResourceOptions(
diff --git a/data_safe_haven/infrastructure/components/dynamic/__init__.py b/data_safe_haven/infrastructure/components/dynamic/__init__.py
index 429fc8470d..78ecfbcef1 100644
--- a/data_safe_haven/infrastructure/components/dynamic/__init__.py
+++ b/data_safe_haven/infrastructure/components/dynamic/__init__.py
@@ -1,13 +1,10 @@
from .blob_container_acl import BlobContainerAcl, BlobContainerAclProps
-from .entra_application import EntraApplication, EntraApplicationProps
from .file_share_file import FileShareFile, FileShareFileProps
from .ssl_certificate import SSLCertificate, SSLCertificateProps
__all__ = [
"BlobContainerAcl",
"BlobContainerAclProps",
- "EntraApplication",
- "EntraApplicationProps",
"FileShareFile",
"FileShareFileProps",
"SSLCertificate",
diff --git a/data_safe_haven/infrastructure/components/dynamic/entra_application.py b/data_safe_haven/infrastructure/components/dynamic/entra_application.py
deleted file mode 100644
index fd2d233137..0000000000
--- a/data_safe_haven/infrastructure/components/dynamic/entra_application.py
+++ /dev/null
@@ -1,191 +0,0 @@
-"""Pulumi dynamic component for Entra applications."""
-
-from contextlib import suppress
-from typing import Any
-
-from pulumi import Input, Output, ResourceOptions
-from pulumi.dynamic import CreateResult, DiffResult, Resource, UpdateResult
-
-from data_safe_haven.exceptions import DataSafeHavenMicrosoftGraphError
-from data_safe_haven.external import GraphApi
-
-from .dsh_resource_provider import DshResourceProvider
-
-
-class EntraApplicationProps:
- """Props for the EntraApplication class"""
-
- def __init__(
- self,
- application_name: Input[str],
- application_role_assignments: Input[list[str]] | None = None,
- application_secret_name: Input[str] | None = None,
- delegated_role_assignments: Input[list[str]] | None = None,
- public_client_redirect_uri: Input[str] | None = None,
- web_redirect_url: Input[str] | None = None,
- ) -> None:
- self.application_name = application_name
- self.application_role_assignments = application_role_assignments
- self.application_secret_name = application_secret_name
- self.delegated_role_assignments = delegated_role_assignments
- self.public_client_redirect_uri = public_client_redirect_uri
- self.web_redirect_url = web_redirect_url
-
-
-class EntraApplicationProvider(DshResourceProvider):
- def __init__(self, auth_token: str):
- self.auth_token = auth_token
- super().__init__()
-
- def create(self, props: dict[str, Any]) -> CreateResult:
- """Create new Entra application."""
- outs = dict(**props)
- try:
- graph_api = GraphApi.from_token(self.auth_token, disable_logging=True)
- request_json = {
- "displayName": props["application_name"],
- "signInAudience": "AzureADMyOrg",
- }
- # Add a web redirection URL if requested
- if props.get("web_redirect_url", None):
- request_json["web"] = {
- "redirectUris": [props["web_redirect_url"]],
- "implicitGrantSettings": {"enableIdTokenIssuance": True},
- }
- # Add a public client redirection URL if requested
- if props.get("public_client_redirect_uri", None):
- request_json["publicClient"] = {
- "redirectUris": [props["public_client_redirect_uri"]],
- }
- json_response = graph_api.create_application(
- props["application_name"],
- application_scopes=props.get("application_role_assignments", []),
- delegated_scopes=props.get("delegated_role_assignments", []),
- request_json=request_json,
- )
- outs["object_id"] = json_response["id"]
- outs["application_id"] = json_response["appId"]
-
- # Grant requested role permissions
- graph_api.grant_role_permissions(
- outs["application_name"],
- application_role_assignments=props.get(
- "application_role_assignments", []
- ),
- delegated_role_assignments=props.get("delegated_role_assignments", []),
- )
-
- # Attach an application secret if requested
- outs["application_secret"] = (
- graph_api.create_application_secret(
- props["application_name"],
- props["application_secret_name"],
- )
- if props.get("application_secret_name", None)
- else ""
- )
- except Exception as exc:
- msg = f"Failed to create application '{props['application_name']}' in Entra ID."
- raise DataSafeHavenMicrosoftGraphError(msg) from exc
- return CreateResult(
- f"EntraApplication-{props['application_name']}",
- outs=outs,
- )
-
- def delete(self, id_: str, props: dict[str, Any]) -> None:
- """Delete an Entra application."""
- # Use `id` as a no-op to avoid ARG002 while maintaining function signature
- id(id_)
- try:
- graph_api = GraphApi.from_token(self.auth_token, disable_logging=True)
- graph_api.delete_application(props["application_name"])
- except Exception as exc:
- msg = f"Failed to delete application '{props['application_name']}' from Entra ID."
- raise DataSafeHavenMicrosoftGraphError(msg) from exc
-
- def diff(
- self,
- id_: str,
- old_props: dict[str, Any],
- new_props: dict[str, Any],
- ) -> DiffResult:
- """Calculate diff between old and new state"""
- # Use `id` as a no-op to avoid ARG002 while maintaining function signature
- id(id_)
- # We exclude '__provider' from the diff. This is a Base64-encoded pickle of this
- # EntraApplicationProvider instance. This means that it contains self.auth_token
- # and would otherwise trigger a diff each time the auth_token changes. Note that
- # ignoring '__provider' could cause issues if the structure of this class
- # changes in any other way, but this could be fixed by manually deleting the
- # application in the Entra directory.
- return self.partial_diff(old_props, new_props, excluded_props=["__provider"])
-
- def refresh(self, props: dict[str, Any]) -> dict[str, Any]:
- try:
- outs = dict(**props)
- with suppress(DataSafeHavenMicrosoftGraphError, KeyError):
- graph_api = GraphApi.from_token(self.auth_token, disable_logging=True)
- if json_response := graph_api.get_application_by_name(
- outs["application_name"]
- ):
- outs["object_id"] = json_response["id"]
- outs["application_id"] = json_response["appId"]
-
- # Ensure that requested role permissions have been granted
- graph_api.grant_role_permissions(
- outs["application_name"],
- application_role_assignments=props.get(
- "application_role_assignments", []
- ),
- delegated_role_assignments=props.get(
- "delegated_role_assignments", []
- ),
- )
- return outs
- except Exception as exc:
- msg = f"Failed to refresh application '{props['application_name']}' in Entra ID."
- raise DataSafeHavenMicrosoftGraphError(msg) from exc
-
- def update(
- self,
- id_: str,
- old_props: dict[str, Any],
- new_props: dict[str, Any],
- ) -> UpdateResult:
- """Updating is deleting followed by creating."""
- try:
- # Delete the old application, using the auth token from new_props
- old_props_ = {**old_props}
- self.delete(id_, old_props_)
- # Create a new application
- updated = self.create(new_props)
- return UpdateResult(outs=updated.outs)
- except Exception as exc:
- msg = f"Failed to update application '{new_props['application_name']}' in Entra ID."
- raise DataSafeHavenMicrosoftGraphError(msg) from exc
-
-
-class EntraApplication(Resource):
- application_id: Output[str]
- application_secret: Output[str]
- object_id: Output[str]
- _resource_type_name = "dsh:common:EntraApplication" # set resource type
-
- def __init__(
- self,
- name: str,
- props: EntraApplicationProps,
- auth_token: str,
- opts: ResourceOptions | None = None,
- ):
- super().__init__(
- EntraApplicationProvider(auth_token),
- name,
- {
- "application_id": None,
- "application_secret": None,
- "object_id": None,
- **vars(props),
- },
- opts,
- )
diff --git a/data_safe_haven/infrastructure/components/wrapped/nfsv3_storage_account.py b/data_safe_haven/infrastructure/components/wrapped/nfsv3_storage_account.py
index 181839e71d..e259de4806 100644
--- a/data_safe_haven/infrastructure/components/wrapped/nfsv3_storage_account.py
+++ b/data_safe_haven/infrastructure/components/wrapped/nfsv3_storage_account.py
@@ -4,6 +4,7 @@
from pulumi_azure_native import storage
from data_safe_haven.external import AzureIPv4Range
+from data_safe_haven.types import AzureServiceTag
class WrappedNFSV3StorageAccount(storage.StorageAccount):
@@ -24,17 +25,35 @@ def __init__(
resource_name: str,
*,
account_name: Input[str],
- allowed_ip_addresses: Input[Sequence[str]],
+ allowed_ip_addresses: Input[Sequence[str]] | None,
+ allowed_service_tag: AzureServiceTag | None,
location: Input[str],
resource_group_name: Input[str],
subnet_id: Input[str],
opts: ResourceOptions,
tags: Input[Mapping[str, Input[str]]],
):
+ if allowed_service_tag == AzureServiceTag.INTERNET:
+ default_action = storage.DefaultAction.ALLOW
+ ip_rules = []
+ else:
+ default_action = storage.DefaultAction.DENY
+ ip_rules = Output.from_input(allowed_ip_addresses).apply(
+ lambda ip_ranges: [
+ storage.IPRuleArgs(
+ action=storage.Action.ALLOW,
+ i_p_address_or_range=str(ip_address),
+ )
+ for ip_range in sorted(ip_ranges)
+ for ip_address in AzureIPv4Range.from_cidr(ip_range).all_ips()
+ ]
+ )
+
self.resource_group_name_ = Output.from_input(resource_group_name)
super().__init__(
resource_name,
account_name=account_name,
+ allow_blob_public_access=False,
enable_https_traffic_only=True,
enable_nfs_v3=True,
encryption=self.encryption_args,
@@ -44,23 +63,15 @@ def __init__(
minimum_tls_version=storage.MinimumTlsVersion.TLS1_2,
network_rule_set=storage.NetworkRuleSetArgs(
bypass=storage.Bypass.AZURE_SERVICES,
- default_action=storage.DefaultAction.DENY,
- ip_rules=Output.from_input(allowed_ip_addresses).apply(
- lambda ip_ranges: [
- storage.IPRuleArgs(
- action=storage.Action.ALLOW,
- i_p_address_or_range=str(ip_address),
- )
- for ip_range in sorted(ip_ranges)
- for ip_address in AzureIPv4Range.from_cidr(ip_range).all_ips()
- ]
- ),
+ default_action=default_action,
+ ip_rules=ip_rules,
virtual_network_rules=[
storage.VirtualNetworkRuleArgs(
virtual_network_resource_id=subnet_id,
)
],
),
+ public_network_access=storage.PublicNetworkAccess.ENABLED,
resource_group_name=resource_group_name,
sku=storage.SkuArgs(name=storage.SkuName.PREMIUM_ZRS),
opts=opts,
diff --git a/data_safe_haven/infrastructure/programs/declarative_sre.py b/data_safe_haven/infrastructure/programs/declarative_sre.py
index ce678dbb4a..15989bbe7b 100644
--- a/data_safe_haven/infrastructure/programs/declarative_sre.py
+++ b/data_safe_haven/infrastructure/programs/declarative_sre.py
@@ -35,11 +35,9 @@ def __init__(
self,
context: Context,
config: SREConfig,
- graph_api_token: str,
) -> None:
self.context = context
self.config = config
- self.graph_api_token = graph_api_token
self.stack_name = replace_separators(
f"shm-{context.name}-sre-{config.name}", "-"
)
@@ -112,14 +110,6 @@ def __call__(self) -> None:
]
)
- # Deploy Entra resources
- SREEntraComponent(
- "sre_entra",
- SREEntraProps(
- group_names=ldap_group_names,
- ),
- )
-
# Deploy resource group
resource_group = resources.ResourceGroup(
"sre_resource_group",
@@ -162,6 +152,17 @@ def __call__(self) -> None:
tags=self.tags,
)
+ # Deploy Entra resources
+ entra = SREEntraComponent(
+ "sre_entra",
+ SREEntraProps(
+ group_names=ldap_group_names,
+ shm_name=self.context.name,
+ sre_fqdn=networking.sre_fqdn,
+ sre_name=self.config.name,
+ ),
+ )
+
# Deploy SRE firewall
SREFirewallComponent(
"sre_firewall",
@@ -248,8 +249,8 @@ def __call__(self) -> None:
SREIdentityProps(
dns_server_ip=dns.ip_address,
dockerhub_credentials=dockerhub_credentials,
- entra_application_name=f"sre-{self.config.name}-apricot",
- entra_auth_token=self.graph_api_token,
+ entra_application_id=entra.identity_application_id,
+ entra_application_secret=entra.identity_application_secret,
entra_tenant_id=shm_entra_tenant_id,
location=self.config.azure.location,
resource_group_name=resource_group.name,
@@ -288,9 +289,8 @@ def __call__(self) -> None:
database_password=data.password_user_database_admin,
dns_server_ip=dns.ip_address,
dockerhub_credentials=dockerhub_credentials,
- entra_application_fqdn=networking.sre_fqdn,
- entra_application_name=f"sre-{self.config.name}-guacamole",
- entra_auth_token=self.graph_api_token,
+ entra_application_id=entra.remote_desktop_application_id,
+ entra_application_url=entra.remote_desktop_url,
entra_tenant_id=shm_entra_tenant_id,
ldap_group_filter=ldap_group_filter,
ldap_group_search_base=ldap_group_search_base,
diff --git a/data_safe_haven/infrastructure/programs/imperative_shm.py b/data_safe_haven/infrastructure/programs/imperative_shm.py
index 9b748bbdd1..f233c7c7fe 100644
--- a/data_safe_haven/infrastructure/programs/imperative_shm.py
+++ b/data_safe_haven/infrastructure/programs/imperative_shm.py
@@ -1,10 +1,11 @@
-from data_safe_haven.config import Context, SHMConfig
+from data_safe_haven.config import Context, DSHPulumiConfig, SHMConfig
from data_safe_haven.exceptions import (
DataSafeHavenAzureError,
DataSafeHavenMicrosoftGraphError,
)
from data_safe_haven.external import AzureSdk, GraphApi
from data_safe_haven.logging import get_logger
+from data_safe_haven.types import EntraSignInAudienceType
class ImperativeSHM:
@@ -147,11 +148,16 @@ def deploy(self) -> None:
try:
graph_api.create_application(
self.context.entra_application_name,
- application_scopes=["Group.ReadWrite.All"],
+ application_scopes=[
+ "Application.ReadWrite.All", # For creating applications
+ "AppRoleAssignment.ReadWrite.All", # For application permissions
+ "Directory.ReadWrite.All", # For creating/deleting groups
+ "Group.ReadWrite.All", # For creating/deleting groups
+ ],
delegated_scopes=[],
request_json={
"displayName": self.context.entra_application_name,
- "signInAudience": "AzureADMyOrg",
+ "signInAudience": EntraSignInAudienceType.THIS_TENANT.value,
},
)
# Always recreate the application secret.
@@ -172,6 +178,13 @@ def teardown(self) -> None:
DataSafeHavenAzureError if any resources cannot be destroyed
"""
logger = get_logger()
+ if DSHPulumiConfig.remote_exists(self.context):
+ pulumi_config = DSHPulumiConfig.from_remote(self.context)
+ deployed = pulumi_config.project_names
+ if deployed:
+ logger.info(f"Found deployed SREs: {deployed}.")
+ msg = "Deployed SREs must be torn down before the SHM can be torn down."
+ raise DataSafeHavenAzureError(msg)
try:
logger.info(
f"Removing [green]{self.context.description}[/] resource group {self.context.resource_group_name}."
diff --git a/data_safe_haven/infrastructure/programs/sre/data.py b/data_safe_haven/infrastructure/programs/sre/data.py
index 9e18666277..711b76139f 100644
--- a/data_safe_haven/infrastructure/programs/sre/data.py
+++ b/data_safe_haven/infrastructure/programs/sre/data.py
@@ -35,7 +35,7 @@
SSLCertificateProps,
WrappedNFSV3StorageAccount,
)
-from data_safe_haven.types import AzureDnsZoneNames
+from data_safe_haven.types import AzureDnsZoneNames, AzureServiceTag
class SREDataProps:
@@ -46,7 +46,7 @@ def __init__(
admin_email_address: Input[str],
admin_group_id: Input[str],
admin_ip_addresses: Input[Sequence[str]],
- data_provider_ip_addresses: Input[Sequence[str]],
+ data_provider_ip_addresses: Input[list[str]] | AzureServiceTag,
dns_private_zones: Input[dict[str, network.PrivateZone]],
dns_record: Input[network.RecordSet],
dns_server_admin_password: Input[pulumi_random.RandomPassword],
@@ -64,13 +64,7 @@ def __init__(
self.admin_email_address = admin_email_address
self.admin_group_id = admin_group_id
self.data_configuration_ip_addresses = admin_ip_addresses
- self.data_private_sensitive_ip_addresses = Output.all(
- admin_ip_addresses, data_provider_ip_addresses
- ).apply(
- lambda address_lists: {
- ip for address_list in address_lists for ip in address_list
- }
- )
+ self.data_provider_ip_addresses = data_provider_ip_addresses
self.dns_private_zones = dns_private_zones
self.dns_record = dns_record
self.password_dns_server_admin = dns_server_admin_password
@@ -112,6 +106,19 @@ def __init__(
child_opts = ResourceOptions.merge(opts, ResourceOptions(parent=self))
child_tags = {"component": "data"} | (tags if tags else {})
+ if isinstance(props.data_provider_ip_addresses, list):
+ data_private_sensitive_service_tag = None
+ data_private_sensitive_ip_addresses = Output.all(
+ props.data_configuration_ip_addresses, props.data_provider_ip_addresses
+ ).apply(
+ lambda address_lists: {
+ ip for address_list in address_lists for ip in address_list
+ }
+ )
+ else:
+ data_private_sensitive_ip_addresses = None
+ data_private_sensitive_service_tag = props.data_provider_ip_addresses
+
# Define Key Vault reader
identity_key_vault_reader = managedidentity.UserAssignedIdentity(
f"{self._name}_id_key_vault_reader",
@@ -466,7 +473,8 @@ def __init__(
account_name=alphanumeric(
f"{''.join(truncate_tokens(stack_name.split('-'), 11))}sensitivedata{sha256hash(self._name)}"
)[:24],
- allowed_ip_addresses=props.data_private_sensitive_ip_addresses,
+ allowed_ip_addresses=data_private_sensitive_ip_addresses,
+ allowed_service_tag=data_private_sensitive_service_tag,
location=props.location,
subnet_id=props.subnet_data_private_id,
resource_group_name=props.resource_group_name,
diff --git a/data_safe_haven/infrastructure/programs/sre/desired_state.py b/data_safe_haven/infrastructure/programs/sre/desired_state.py
index 73466d6c5b..c4392f5210 100644
--- a/data_safe_haven/infrastructure/programs/sre/desired_state.py
+++ b/data_safe_haven/infrastructure/programs/sre/desired_state.py
@@ -108,6 +108,7 @@ def __init__(
f"{''.join(truncate_tokens(stack_name.split('-'), 11))}desiredstate{sha256hash(self._name)}"
)[:24],
allowed_ip_addresses=props.admin_ip_addresses,
+ allowed_service_tag=None,
location=props.location,
resource_group_name=props.resource_group_name,
subnet_id=props.subnet_desired_state_id,
diff --git a/data_safe_haven/infrastructure/programs/sre/entra.py b/data_safe_haven/infrastructure/programs/sre/entra.py
index 1f44995f9f..abc0241070 100644
--- a/data_safe_haven/infrastructure/programs/sre/entra.py
+++ b/data_safe_haven/infrastructure/programs/sre/entra.py
@@ -2,10 +2,16 @@
from collections.abc import Mapping
-from pulumi import ComponentResource, ResourceOptions
-from pulumi_azuread import Group
+import pulumi_azuread as entra
+from pulumi import ComponentResource, Input, Output, ResourceOptions
from data_safe_haven.functions import replace_separators
+from data_safe_haven.infrastructure.components import (
+ EntraApplicationComponent,
+ EntraDesktopApplicationProps,
+ EntraWebApplicationProps,
+)
+from data_safe_haven.types import EntraApplicationId, EntraAppPermissionType
class SREEntraProps:
@@ -14,8 +20,14 @@ class SREEntraProps:
def __init__(
self,
group_names: Mapping[str, str],
+ sre_fqdn: Input[str],
+ shm_name: Input[str],
+ sre_name: Input[str],
) -> None:
self.group_names = group_names
+ self.shm_name = shm_name
+ self.sre_fqdn = sre_fqdn
+ self.sre_name = sre_name
class SREEntraComponent(ComponentResource):
@@ -28,13 +40,84 @@ def __init__(
opts: ResourceOptions | None = None,
) -> None:
super().__init__("dsh:sre:EntraComponent", name, {}, opts)
+ child_opts = ResourceOptions.merge(opts, ResourceOptions(parent=self))
- for group_id, group_description in props.group_names.items():
- Group(
- replace_separators(f"{self._name}_group_{group_id}", "_"),
+ # Create Entra groups
+ for group_name, group_description in props.group_names.items():
+ entra.Group(
+ replace_separators(f"{self._name}_group_{group_name}", "_"),
description=group_description,
display_name=group_description,
mail_enabled=False,
prevent_duplicate_names=True,
security_enabled=True,
)
+
+ # Get the Microsoft Graph service principal
+ msgraph_service_principal = entra.ServicePrincipal(
+ f"{self._name}_microsoft_graph_service_principal",
+ client_id=EntraApplicationId.MICROSOFT_GRAPH.value,
+ use_existing=True,
+ )
+
+ # Identity application
+ # - needs read-only permissions for users/groups
+ # - needs delegated permission to read users (for validating log-in attempts)
+ # - needs an application secret for authentication
+ self.identity_application = EntraApplicationComponent(
+ f"{self._name}_identity",
+ EntraDesktopApplicationProps(
+ application_name=Output.concat(
+ "Data Safe Haven (",
+ props.shm_name,
+ " - ",
+ props.sre_name,
+ ") Identity Service Principal",
+ ),
+ application_permissions=[
+ (EntraAppPermissionType.APPLICATION, "User.Read.All"),
+ (EntraAppPermissionType.APPLICATION, "GroupMember.Read.All"),
+ (EntraAppPermissionType.DELEGATED, "User.Read.All"),
+ ],
+ msgraph_service_principal=msgraph_service_principal,
+ ),
+ opts=child_opts,
+ )
+
+ # Add an application password
+ self.identity_application_secret = entra.ApplicationPassword(
+ f"{self._name}_identity_application_secret",
+ application_id=self.identity_application.application.id,
+ display_name="Apricot Authentication Secret",
+ )
+
+ # Remote desktop application
+ # - only used as part of the OAuth 2.0 authorization flow
+ # - does not need any application permissions
+ # - does not need an application secret
+ self.remote_desktop_url = Output.from_input(props.sre_fqdn).apply(
+ lambda fqdn: f"https://{str(fqdn).strip('/')}/"
+ )
+ self.remote_desktop_application = EntraApplicationComponent(
+ f"{self._name}_remote_desktop",
+ EntraWebApplicationProps(
+ application_name=Output.concat(
+ "Data Safe Haven (",
+ props.shm_name,
+ " - ",
+ props.sre_name,
+ ") Remote Desktop Service Principal",
+ ),
+ application_permissions=[],
+ msgraph_service_principal=msgraph_service_principal,
+ redirect_url=self.remote_desktop_url,
+ ),
+ opts=child_opts,
+ )
+
+ # Register outputs
+ self.identity_application_id = self.identity_application.application.client_id
+ self.identity_application_secret = self.identity_application_secret.value
+ self.remote_desktop_application_id = (
+ self.remote_desktop_application.application.client_id
+ )
diff --git a/data_safe_haven/infrastructure/programs/sre/identity.py b/data_safe_haven/infrastructure/programs/sre/identity.py
index 0196fe7e39..7839853384 100644
--- a/data_safe_haven/infrastructure/programs/sre/identity.py
+++ b/data_safe_haven/infrastructure/programs/sre/identity.py
@@ -11,8 +11,6 @@
get_ip_address_from_container_group,
)
from data_safe_haven.infrastructure.components import (
- EntraApplication,
- EntraApplicationProps,
LocalDnsRecordComponent,
LocalDnsRecordProps,
)
@@ -25,8 +23,8 @@ def __init__(
self,
dns_server_ip: Input[str],
dockerhub_credentials: DockerHubCredentials,
- entra_application_name: Input[str],
- entra_auth_token: str,
+ entra_application_id: Input[str],
+ entra_application_secret: Input[str],
entra_tenant_id: Input[str],
location: Input[str],
resource_group_name: Input[str],
@@ -38,8 +36,8 @@ def __init__(
) -> None:
self.dns_server_ip = dns_server_ip
self.dockerhub_credentials = dockerhub_credentials
- self.entra_application_name = entra_application_name
- self.entra_auth_token = entra_auth_token
+ self.entra_application_id = entra_application_id
+ self.entra_application_secret = entra_application_secret
self.entra_tenant_id = entra_tenant_id
self.location = location
self.resource_group_name = resource_group_name
@@ -82,20 +80,6 @@ def __init__(
opts=child_opts,
)
- # Define Entra ID application
- entra_application = EntraApplication(
- f"{self._name}_entra_application",
- EntraApplicationProps(
- application_name=props.entra_application_name,
- application_role_assignments=["User.Read.All", "GroupMember.Read.All"],
- application_secret_name="Apricot Authentication Secret",
- delegated_role_assignments=["User.Read.All"],
- public_client_redirect_uri="urn:ietf:wg:oauth:2.0:oob",
- ),
- auth_token=props.entra_auth_token,
- opts=child_opts,
- )
-
# Define the LDAP server container group with Apricot
container_group = containerinstance.ContainerGroup(
f"{self._name}_container_group",
@@ -111,11 +95,11 @@ def __init__(
),
containerinstance.EnvironmentVariableArgs(
name="CLIENT_ID",
- value=entra_application.application_id,
+ value=props.entra_application_id,
),
containerinstance.EnvironmentVariableArgs(
name="CLIENT_SECRET",
- secure_value=entra_application.application_secret,
+ secure_value=props.entra_application_secret,
),
containerinstance.EnvironmentVariableArgs(
name="DEBUG",
diff --git a/data_safe_haven/infrastructure/programs/sre/remote_desktop.py b/data_safe_haven/infrastructure/programs/sre/remote_desktop.py
index 3be1207c77..e2df83ede5 100644
--- a/data_safe_haven/infrastructure/programs/sre/remote_desktop.py
+++ b/data_safe_haven/infrastructure/programs/sre/remote_desktop.py
@@ -11,8 +11,6 @@
get_id_from_subnet,
)
from data_safe_haven.infrastructure.components import (
- EntraApplication,
- EntraApplicationProps,
FileShareFile,
FileShareFileProps,
PostgresqlDatabaseComponent,
@@ -32,9 +30,8 @@ def __init__(
database_password: Input[str],
dns_server_ip: Input[str],
dockerhub_credentials: DockerHubCredentials,
- entra_application_fqdn: Input[str],
- entra_application_name: Input[str],
- entra_auth_token: str,
+ entra_application_id: Input[str],
+ entra_application_url: Input[str],
entra_tenant_id: Input[str],
ldap_group_filter: Input[str],
ldap_group_search_base: Input[str],
@@ -58,9 +55,8 @@ def __init__(
self.disable_paste = not allow_paste
self.dns_server_ip = dns_server_ip
self.dockerhub_credentials = dockerhub_credentials
- self.entra_application_name = entra_application_name
- self.entra_application_url = Output.concat("https://", entra_application_fqdn)
- self.entra_auth_token = entra_auth_token
+ self.entra_application_id = entra_application_id
+ self.entra_application_url = entra_application_url
self.entra_tenant_id = entra_tenant_id
self.ldap_group_filter = ldap_group_filter
self.ldap_group_search_base = ldap_group_search_base
@@ -119,17 +115,6 @@ def __init__(
child_opts = ResourceOptions.merge(opts, ResourceOptions(parent=self))
child_tags = {"component": "remote desktop"} | (tags if tags else {})
- # Define Entra ID application
- entra_application = EntraApplication(
- f"{self._name}_entra_application",
- EntraApplicationProps(
- application_name=props.entra_application_name,
- web_redirect_url=props.entra_application_url,
- ),
- auth_token=props.entra_auth_token,
- opts=child_opts,
- )
-
# Define configuration file shares
file_share = storage.FileShare(
f"{self._name}_file_share",
@@ -224,7 +209,7 @@ def __init__(
),
containerinstance.EnvironmentVariableArgs(
name="OPENID_CLIENT_ID",
- value=entra_application.application_id,
+ value=props.entra_application_id,
),
containerinstance.EnvironmentVariableArgs(
name="OPENID_ISSUER",
diff --git a/data_safe_haven/infrastructure/project_manager.py b/data_safe_haven/infrastructure/project_manager.py
index a6d5af805b..eca352b28b 100644
--- a/data_safe_haven/infrastructure/project_manager.py
+++ b/data_safe_haven/infrastructure/project_manager.py
@@ -446,14 +446,12 @@ def __init__(
pulumi_config: DSHPulumiConfig,
*,
create_project: bool = False,
- graph_api_token: str | None = None,
) -> None:
"""Constructor"""
- token = graph_api_token or ""
super().__init__(
context,
pulumi_config,
config.name,
- DeclarativeSRE(context, config, token),
+ DeclarativeSRE(context, config),
create_project=create_project,
)
diff --git a/data_safe_haven/provisioning/sre_provisioning_manager.py b/data_safe_haven/provisioning/sre_provisioning_manager.py
index 7c39046b86..b269779d8f 100644
--- a/data_safe_haven/provisioning/sre_provisioning_manager.py
+++ b/data_safe_haven/provisioning/sre_provisioning_manager.py
@@ -7,7 +7,6 @@
AzureContainerInstance,
AzurePostgreSQLDatabase,
AzureSdk,
- GraphApi,
)
from data_safe_haven.infrastructure import SREProjectManager
from data_safe_haven.logging import get_logger
@@ -19,7 +18,6 @@ class SREProvisioningManager:
def __init__(
self,
- graph_api_token: str,
location: AzureLocation,
sre_name: str,
sre_stack: SREProjectManager,
@@ -28,7 +26,6 @@ def __init__(
):
self._available_vm_skus: dict[str, dict[str, Any]] | None = None
self.location = location
- self.graph_api = GraphApi.from_token(graph_api_token)
self.logger = get_logger()
self.sre_name = sre_name
self.subscription_name = subscription_name
diff --git a/data_safe_haven/types/__init__.py b/data_safe_haven/types/__init__.py
index 4f2f89b3be..728df06c19 100644
--- a/data_safe_haven/types/__init__.py
+++ b/data_safe_haven/types/__init__.py
@@ -8,6 +8,7 @@
Fqdn,
Guid,
IpAddress,
+ SafeSreName,
SafeString,
TimeZone,
UniqueList,
@@ -17,6 +18,9 @@
AzureSdkCredentialScope,
AzureServiceTag,
DatabaseSystem,
+ EntraApplicationId,
+ EntraAppPermissionType,
+ EntraSignInAudienceType,
FirewallPriorities,
ForbiddenDomains,
NetworkingPriorities,
@@ -36,7 +40,10 @@
"AzureVmSku",
"DatabaseSystem",
"EmailAddress",
+ "EntraApplicationId",
+ "EntraAppPermissionType",
"EntraGroupName",
+ "EntraSignInAudienceType",
"FirewallPriorities",
"ForbiddenDomains",
"Fqdn",
@@ -46,6 +53,7 @@
"PathType",
"PermittedDomains",
"Ports",
+ "SafeSreName",
"SafeString",
"SoftwarePackageCategory",
"TimeZone",
diff --git a/data_safe_haven/types/annotated_types.py b/data_safe_haven/types/annotated_types.py
index 639bf03129..d6258b0e7a 100644
--- a/data_safe_haven/types/annotated_types.py
+++ b/data_safe_haven/types/annotated_types.py
@@ -21,6 +21,7 @@
Fqdn = Annotated[str, AfterValidator(validators.fqdn)]
Guid = Annotated[str, AfterValidator(validators.aad_guid)]
IpAddress = Annotated[str, AfterValidator(validators.ip_address)]
+SafeSreName = Annotated[str, AfterValidator(validators.safe_sre_name)]
SafeString = Annotated[str, AfterValidator(validators.safe_string)]
TimeZone = Annotated[str, AfterValidator(validators.timezone)]
TH = TypeVar("TH", bound=Hashable)
diff --git a/data_safe_haven/types/enums.py b/data_safe_haven/types/enums.py
index 35465f260e..17d5dda8e3 100644
--- a/data_safe_haven/types/enums.py
+++ b/data_safe_haven/types/enums.py
@@ -37,6 +37,25 @@ class DatabaseSystem(str, Enum):
POSTGRESQL = "postgresql"
+@verify(UNIQUE)
+class EntraApplicationId(str, Enum):
+ MICROSOFT_GRAPH = "00000003-0000-0000-c000-000000000000"
+
+
+@verify(UNIQUE)
+class EntraAppPermissionType(str, Enum):
+ APPLICATION = "Role"
+ DELEGATED = "Scope"
+
+
+@verify(UNIQUE)
+class EntraSignInAudienceType(str, Enum):
+ ANY_TENANT = "AzureADMultipleOrgs"
+ ANY_TENANT_OR_PERSONAL = "AzureADandPersonalMicrosoftAccount"
+ PERSONAL = "PersonalMicrosoftAccount"
+ THIS_TENANT = "AzureADMyOrg"
+
+
@verify(UNIQUE)
class FirewallPriorities(int, Enum):
"""Priorities for firewall rules."""
diff --git a/data_safe_haven/validators/__init__.py b/data_safe_haven/validators/__init__.py
index 849b199857..30316e0834 100644
--- a/data_safe_haven/validators/__init__.py
+++ b/data_safe_haven/validators/__init__.py
@@ -6,6 +6,7 @@
typer_entra_group_name,
typer_fqdn,
typer_ip_address,
+ typer_safe_sre_name,
typer_safe_string,
typer_timezone,
)
@@ -18,6 +19,7 @@
entra_group_name,
fqdn,
ip_address,
+ safe_sre_name,
safe_string,
timezone,
unique_list,
@@ -32,6 +34,7 @@
"entra_group_name",
"fqdn",
"ip_address",
+ "safe_sre_name",
"safe_string",
"timezone",
"typer_aad_guid",
@@ -41,6 +44,7 @@
"typer_entra_group_name",
"typer_fqdn",
"typer_ip_address",
+ "typer_safe_sre_name",
"typer_safe_string",
"typer_timezone",
"unique_list",
diff --git a/data_safe_haven/validators/typer.py b/data_safe_haven/validators/typer.py
index f1c8239ecc..fd50774290 100644
--- a/data_safe_haven/validators/typer.py
+++ b/data_safe_haven/validators/typer.py
@@ -33,5 +33,6 @@ def typer_validator(x: Any) -> Any:
typer_entra_group_name = typer_validator_factory(validators.entra_group_name)
typer_fqdn = typer_validator_factory(validators.fqdn)
typer_ip_address = typer_validator_factory(validators.ip_address)
+typer_safe_sre_name = typer_validator_factory(validators.safe_sre_name)
typer_safe_string = typer_validator_factory(validators.safe_string)
typer_timezone = typer_validator_factory(validators.timezone)
diff --git a/data_safe_haven/validators/validators.py b/data_safe_haven/validators/validators.py
index 27507d26b4..c9cea495c9 100644
--- a/data_safe_haven/validators/validators.py
+++ b/data_safe_haven/validators/validators.py
@@ -124,17 +124,24 @@ def ip_address(ip_address: str) -> str:
try:
return str(ipaddress.ip_network(ip_address))
except Exception as exc:
- msg = "Expected valid IPv4 address, for example '1.1.1.1', or 'Internet'."
+ msg = "Expected valid IPv4 address, for example '1.1.1.1'."
raise ValueError(msg) from exc
def safe_string(safe_string: str) -> str:
- if not re.match(r"^[a-zA-Z0-9_-]*$", safe_string) or not safe_string:
+ if not re.match(r"^[a-zA-Z0-9_-]+$", safe_string) or not safe_string:
msg = "Expected valid string containing only letters, numbers, hyphens and underscores."
raise ValueError(msg)
return safe_string
+def safe_sre_name(safe_sre_name: str) -> str:
+ if not re.match(r"^[a-z0-9_-]+$", safe_sre_name) or not safe_sre_name:
+ msg = "Expected valid string containing only lowercase letters, numbers, hyphens and underscores."
+ raise ValueError(msg)
+ return safe_sre_name
+
+
def timezone(timezone: str) -> str:
if timezone not in pytz.all_timezones:
msg = "Expected valid timezone, for example 'Europe/London'."
diff --git a/data_safe_haven/version.py b/data_safe_haven/version.py
index 0513a64c8f..6a7d91a4eb 100644
--- a/data_safe_haven/version.py
+++ b/data_safe_haven/version.py
@@ -1,2 +1,2 @@
-__version__ = "5.0.1"
+__version__ = "5.1.0"
__version_info__ = tuple(__version__.split("."))
diff --git a/docs/source/contributing/index.md b/docs/source/contributing/index.md
index 37f5e26f9d..20c14073bf 100644
--- a/docs/source/contributing/index.md
+++ b/docs/source/contributing/index.md
@@ -9,69 +9,69 @@
![Alvaro Cabrejas Egea](https://avatars.githubusercontent.com/u/22940095?v=4?s=100) Alvaro Cabrejas Egea ๐ป ๐ |
+ ![Arielle Bennett](https://avatars.githubusercontent.com/u/74651964?v=4?s=100) Arielle Bennett ๐ ๐ค ๐ |
+ ![Benjamin Walden](https://avatars.githubusercontent.com/u/54804128?v=4?s=100) Benjamin Walden ๐ ๐ค ๐ ๐ ๐ |
+ ![Brett Todd](https://avatars.githubusercontent.com/u/62715658?v=4?s=100) Brett Todd ๐ป ๐ค |
![Callum Mole](https://avatars.githubusercontent.com/u/22677759?v=4?s=100) Callum Mole ๐ ๐ป |
- ![Carlos Gavidia-Calderon](https://avatars.githubusercontent.com/u/1616531?v=4?s=100) Carlos Gavidia-Calderon ๐ |
+ ![Carlos Gavidia-Calderon](https://avatars.githubusercontent.com/u/1616531?v=4?s=100) Carlos Gavidia-Calderon ๐ ๐ค ๐ |
![Catalina Vallejos](https://avatars.githubusercontent.com/u/7511093?v=4?s=100) Catalina Vallejos ๐ |
- ![Christopher Edsall](https://avatars.githubusercontent.com/u/1021204?v=4?s=100) Christopher Edsall ๐ป ๐ ๐ |
- ![DDelbarre](https://avatars.githubusercontent.com/u/108824056?v=4?s=100) DDelbarre ๐ |
- ![Daniel](https://avatars.githubusercontent.com/u/49038294?v=4?s=100) Daniel ๐ป ๐ |
+ ![Catherine Lawrence](https://avatars.githubusercontent.com/u/38755168?v=4?s=100) Catherine Lawrence ๐ ๐ ๐ ๐ค |
+ ![Christopher Edsall](https://avatars.githubusercontent.com/u/1021204?v=4?s=100) Christopher Edsall ๐ป ๐ ๐ |
+ ![Daniel Allen](https://avatars.githubusercontent.com/u/49038294?v=4?s=100) Daniel Allen ๐ป ๐ |
+ ![Daniel Delbarre](https://avatars.githubusercontent.com/u/108824056?v=4?s=100) Daniel Delbarre ๐ |
![David Beavan](https://avatars.githubusercontent.com/u/6524799?v=4?s=100) David Beavan ๐ ๐ |
- ![David Salvador Jasin](https://avatars.githubusercontent.com/u/57944311?v=4?s=100) David Salvador Jasin ๐ ๐ |
+ ![David Salvador Jasin](https://avatars.githubusercontent.com/u/57944311?v=4?s=100) David Salvador Jasin ๐ ๐ ๐ค |
+ ![David Sarmiento Perez](https://avatars.githubusercontent.com/u/118986872?v=4?s=100) David Sarmiento Perez ๐ ๐ ๐ ๐ค ๐ ๐ฃ ๐ข |
+
+
![Diego Arenas](https://avatars.githubusercontent.com/u/7409896?v=4?s=100) Diego Arenas ๐ป ๐ค ๐ |
![Ed Chalstrey](https://avatars.githubusercontent.com/u/5486164?v=4?s=100) Ed Chalstrey ๐ป ๐ ๐ ๐ค ๐ ๐ โ ๏ธ |
![Evelina Gabasova](https://avatars.githubusercontent.com/u/5541162?v=4?s=100) Evelina Gabasova ๐ |
![Federico Nanni](https://avatars.githubusercontent.com/u/8415204?v=4?s=100) Federico Nanni ๐ป ๐ ๐ ๐ค |
![Franz Kirรกly](https://avatars.githubusercontent.com/u/7985502?v=4?s=100) Franz Kirรกly ๐ |
+ ![George Holmes](https://avatars.githubusercontent.com/u/62715301?v=4?s=100) George Holmes ๐ป ๐ค |
+ ![Guillaume Noell](https://avatars.githubusercontent.com/u/50482094?v=4?s=100) Guillaume Noell ๐ ๐ ๐ค |
- ![Guillaume Noell](https://avatars.githubusercontent.com/u/50482094?v=4?s=100) Guillaume Noell ๐ ๐ ๐ค |
- ![Helen D Little](https://avatars.githubusercontent.com/u/46891265?v=4?s=100) Helen D Little ๐ ๐ |
+ ![Hari Sood](https://avatars.githubusercontent.com/u/67151373?v=4?s=100) Hari Sood ๐ ๐ ๐ค ๐ ๐ ๐ ๐ฃ ๐ฌ ๐ข ๐ก๏ธ ๐ |
+ ![Helen Duncan Little](https://avatars.githubusercontent.com/u/46891265?v=4?s=100) Helen Duncan Little ๐ ๐ ๐ค |
![Helen Sherwood-Taylor](https://avatars.githubusercontent.com/u/217966?v=4?s=100) Helen Sherwood-Taylor ๐ค ๐ |
+ ![Ian Carter](https://avatars.githubusercontent.com/u/34555297?v=4?s=100) Ian Carter ๐ป ๐ |
![Jack Roberts](https://avatars.githubusercontent.com/u/16308271?v=4?s=100) Jack Roberts ๐ป ๐ |
![James Cunningham](https://avatars.githubusercontent.com/u/150765?v=4?s=100) James Cunningham ๐ป ๐ ๐ ๐ค ๐ |
![James Geddes](https://avatars.githubusercontent.com/u/1172905?v=4?s=100) James Geddes ๐ |
- ![James Hetherington](https://avatars.githubusercontent.com/u/55009?v=4?s=100) James Hetherington ๐ ๐ ๐ค ๐ ๐ ๐ฃ ๐ข ๐ |
+ ![James Hetherington](https://avatars.githubusercontent.com/u/55009?v=4?s=100) James Hetherington ๐ ๐ ๐ค ๐ ๐ ๐ฃ ๐ข ๐ |
![James Robinson](https://avatars.githubusercontent.com/u/3502751?v=4?s=100) James Robinson ๐ป ๐ ๐ ๐ ๐ค ๐ ๐ ๐ ๐ ๐ฃ ๐ฌ ๐ ๐ก๏ธ โ ๏ธ ๐ข |
- ![Jim Madge](https://avatars.githubusercontent.com/u/23616154?v=4?s=100) Jim Madge ๐ป ๐ ๐ ๐ค ๐ ๐ ๐ ๐ ๐ฃ ๐ฌ ๐ ๐ก๏ธ โ ๏ธ |
+ ![Jim Madge](https://avatars.githubusercontent.com/u/23616154?v=4?s=100) Jim Madge ๐ป ๐ ๐ ๐ค ๐ ๐ ๐ ๐ ๐ฃ ๐ฌ ๐ ๐ก๏ธ โ ๏ธ ๐ข |
![Josh Everett](https://avatars.githubusercontent.com/u/17052866?v=4?s=100) Josh Everett ๐ |
- ![Jules M](https://avatars.githubusercontent.com/u/40864686?v=4?s=100) Jules M ๐ ๐ค ๐ ๐ |
+ ![Jules Manser](https://avatars.githubusercontent.com/u/40864686?v=4?s=100) Jules Manser ๐ ๐ค ๐ ๐ |
+ ![Kevin Xu](https://avatars.githubusercontent.com/u/48526846?v=4?s=100) Kevin Xu ๐ ๐ค ๐ก๏ธ |
![Kirstie Whitaker](https://avatars.githubusercontent.com/u/3626306?v=4?s=100) Kirstie Whitaker ๐ ๐ ๐ ๐ค ๐ ๐ ๐ ๐ฃ ๐ข ๐ |
+
+
![Martin O'Reilly](https://avatars.githubusercontent.com/u/21147592?v=4?s=100) Martin O'Reilly ๐ป ๐ ๐ ๐ ๐ค ๐ ๐ ๐ ๐ ๐ฃ ๐ฌ ๐ ๐ก๏ธ โ ๏ธ ๐ข |
![Matt Craddock](https://avatars.githubusercontent.com/u/5796417?v=4?s=100) Matt Craddock ๐ป ๐ ๐ ๐ค ๐ ๐ ๐ฃ ๐ฌ ๐ ๐ก๏ธ โ ๏ธ |
+ ![Matt Westby](https://avatars.githubusercontent.com/u/91054185?v=4?s=100) Matt Westby ๐ |
+ ![Miguel Morin](https://avatars.githubusercontent.com/u/32396311?v=4?s=100) Miguel Morin ๐ป ๐ ๐ค โ ๏ธ |
+ ![Oliver Forrest](https://avatars.githubusercontent.com/u/49275282?v=4?s=100) Oliver Forrest ๐ ๐ค ๐ ๐ฃ ๐ |
+ ![Oscar T Giles](https://avatars.githubusercontent.com/u/12784013?v=4?s=100) Oscar T Giles ๐ป ๐ ๐ค |
+ ![Rachel Winstanley](https://avatars.githubusercontent.com/u/56362072?v=4?s=100) Rachel Winstanley ๐ ๐ค ๐ ๐ก๏ธ |
- ![Oscar T Giles](https://avatars.githubusercontent.com/u/12784013?v=4?s=100) Oscar T Giles ๐ป ๐ ๐ค |
![Radka Jersakova](https://avatars.githubusercontent.com/u/29207091?v=4?s=100) Radka Jersakova ๐ |
![Rob Clarke](https://avatars.githubusercontent.com/u/29575619?v=4?s=100) Rob Clarke ๐ค ๐ ๐ป ๐ ๐ |
+ ![Sebastian Vollmer](https://avatars.githubusercontent.com/u/12613127?v=4?s=100) Sebastian Vollmer ๐ ๐ ๐ค ๐ |
![Steven Carlysle-Davies](https://avatars.githubusercontent.com/u/5108635?v=4?s=100) Steven Carlysle-Davies ๐ป ๐ ๐ค |
![Tim Hobson](https://avatars.githubusercontent.com/u/26117394?v=4?s=100) Tim Hobson ๐ป ๐ ๐ ๐ค |
![Tom Doel](https://avatars.githubusercontent.com/u/4216900?v=4?s=100) Tom Doel ๐ป ๐ ๐ ๐ค ๐ |
![Tomas Lazauskas](https://avatars.githubusercontent.com/u/12182911?v=4?s=100) Tomas Lazauskas ๐ป ๐ ๐ ๐ค |
- ![arielle-bennett](https://avatars.githubusercontent.com/u/74651964?v=4?s=100) arielle-bennett ๐ ๐ค ๐ |
- ![bw-faststream](https://avatars.githubusercontent.com/u/54804128?v=4?s=100) bw-faststream ๐ ๐ค ๐ ๐ ๐ |
- ![cathiest](https://avatars.githubusercontent.com/u/38755168?v=4?s=100) cathiest ๐ ๐ ๐ ๐ค |
- ![davsarper](https://avatars.githubusercontent.com/u/118986872?v=4?s=100) davsarper ๐ ๐ ๐ ๐ค ๐ ๐ฃ ๐ข |
- ![ens-brett-todd](https://avatars.githubusercontent.com/u/62715658?v=4?s=100) ens-brett-todd ๐ป ๐ค |
- ![ens-george-holmes](https://avatars.githubusercontent.com/u/62715301?v=4?s=100) ens-george-holmes ๐ป ๐ค |
- ![getcarter21](https://avatars.githubusercontent.com/u/34555297?v=4?s=100) getcarter21 ๐ป ๐ |
-
-
- ![harisood](https://avatars.githubusercontent.com/u/67151373?v=4?s=100) harisood ๐ ๐ ๐ค ๐ ๐ ๐ ๐ฃ ๐ฌ ๐ข ๐ก๏ธ ๐ |
- ![kevinxufs](https://avatars.githubusercontent.com/u/48526846?v=4?s=100) kevinxufs ๐ ๐ค ๐ก๏ธ |
- ![mattwestby](https://avatars.githubusercontent.com/u/91054185?v=4?s=100) mattwestby ๐ |
- ![miguelmorin](https://avatars.githubusercontent.com/u/32396311?v=4?s=100) miguelmorin ๐ป ๐ ๐ค โ ๏ธ |
- ![oforrest](https://avatars.githubusercontent.com/u/49275282?v=4?s=100) oforrest ๐ ๐ค ๐ ๐ฃ ๐ |
- ![rwinstanley1](https://avatars.githubusercontent.com/u/56362072?v=4?s=100) rwinstanley1 ๐ ๐ค ๐ ๐ก๏ธ |
- ![vollmersj](https://avatars.githubusercontent.com/u/12613127?v=4?s=100) vollmersj ๐ ๐ ๐ค ๐ |
-
-
- ![warwick26](https://avatars.githubusercontent.com/u/33690673?v=4?s=100) warwick26 ๐ป ๐ค |
+ ![Warwick Wood](https://avatars.githubusercontent.com/u/33690673?v=4?s=100) Warwick Wood ๐ป ๐ค |
diff --git a/docs/source/deployment/deploy_sre.md b/docs/source/deployment/deploy_sre.md
index 2a1e5511a7..5a5a5b4166 100644
--- a/docs/source/deployment/deploy_sre.md
+++ b/docs/source/deployment/deploy_sre.md
@@ -46,6 +46,7 @@ $ dsh config template --file PATH_YOU_WANT_TO_SAVE_YOUR_YAML_FILE_TO \
:::{code} yaml
azure:
+ location: # Azure location where SRE resources will be deployed
subscription_id: # ID of the Azure subscription that the TRE will be deployed to
tenant_id: # Home tenant for the Azure account used to deploy infrastructure: `az account show`
description: # A free-text description of your SRE deployment
@@ -61,22 +62,31 @@ sre:
remote_desktop:
allow_copy: # True/False: whether to allow copying text out of the environment
allow_paste: # True/False: whether to allow pasting text into the environment
- research_user_ip_addresses: # List of IP addresses belonging to users
+ research_user_ip_addresses:
+ - # List of IP addresses belonging to users
+ - # You can also use the tag 'Internet' instead of a list
software_packages: # Which Python/R packages to allow users to install: [any/pre-approved/none]
+ storage_quota_gb:
+ home: # Total size in GiB across all home directories
+ shared: # Total size in GiB for the shared directories
timezone: # Timezone in pytz format (eg. Europe/London)
workspace_skus: # List of Azure VM SKUs that will be used for data analysis.
:::
::::
-:::{admonition} Supported Azure regions
-:class: dropdown important
+### Configuration guidance
+
+#### Choosing an Azure region
Some of the SRE resources are not available in all Azure regions.
- Workspace virtual machines use zone redundant storage managed disks which have [limited regional availability](https://learn.microsoft.com/en-us/azure/virtual-machines/disks-redundancy).
- Some shares mounted on workspace virtual machines require premium file shares which have [limited regional availability](https://learn.microsoft.com/en-us/azure/storage/files/redundancy-premium-file-shares).
+:::{admonition} Supported Azure regions
+:class: dropdown important
+
The regions which satisfy all requirements are,
- Australia East
@@ -111,6 +121,8 @@ The regions which satisfy all requirements are,
:::
+#### Choosing a VM SKU
+
:::{hint}
See [here](https://learn.microsoft.com/en-us/azure/virtual-machines/sizes/) for a full list of valid Azure VM SKUs.
:::
@@ -155,7 +167,7 @@ As some general recommendations,
- For general purpose use, the D family gives decent performance and a good balance of CPU and memory.
The [Dsv6 series](https://learn.microsoft.com/en-us/azure/virtual-machines/sizes/general-purpose/dsv6-series#sizes-in-series) is a good starting point and can be scaled from 2 CPUs and 8 GB RAM to 128 CPUs and 512 GB RAM.
- - `Standard_D8s_v6` should give reasonable performance for a single concurrent user.
+ - `Standard_D8s_v5` should give reasonable performance for a single concurrent user.
- For GPU accelerated work, the NC family provides Nvidia GPUs and a good balance of CPU and memory.
In order of increasing throughput, the `NCv3` series features Nvidia V100 GPUs, the `NC_A100_v4` series features Nvidia A100 GPUs, and the `NCads_H100_v5` series features Nvidia H100 GPUs.
- `Stanard_NC6s_v3` should give reasonable performance for a single concurrent user with AI/ML workloads.
@@ -164,6 +176,70 @@ As some general recommendations,
:::
+#### Copy and paste
+
+The [Guacamole clipboard](https://guacamole.apache.org/doc/gug/using-guacamole.html#using-the-clipboard) provides an interface between the local clipboard and the clipboard on the remote workspaces.
+Only text is allowed to be passed through the Guacamole clipboard.
+
+The ability to copy and paste text to or from SRE workspaces via the Guacamole clipboard can be controlled with the DSH configuration parameters `allow_copy` and `allow_paste`.
+`allow_copy` allows users to copy text from an SRE workspace to the Guacamole clipboard.
+`allow_paste` allows users to paste text into an SRE workspace from the Guacamole clipboard.
+These options have no impact on the ability to use copy and paste within a workspace.
+
+The impact of setting each of these options is detailed in the following table.
+
+<table>
+    <caption>Configuration of copy and paste</caption>
+    <thead>
+        <tr>
+            <th colspan="2">Configuration setting</th>
+            <th colspan="4">Resulting behaviour</th>
+        </tr>
+        <tr>
+            <th>allow_copy</th>
+            <th>allow_paste</th>
+            <th>Copy/paste within workspace</th>
+            <th>Copy/paste between workspaces</th>
+            <th>Copy to local machine</th>
+            <th>Paste from local machine</th>
+        </tr>
+    </thead>
+    <tbody>
+        <tr>
+            <td>true</td>
+            <td>true</td>
+            <td>yes</td>
+            <td>yes (via local machine)</td>
+            <td>yes</td>
+            <td>yes</td>
+        </tr>
+        <tr>
+            <td>true</td>
+            <td>false</td>
+            <td>yes</td>
+            <td>no</td>
+            <td>yes</td>
+            <td>no</td>
+        </tr>
+        <tr>
+            <td>false</td>
+            <td>true</td>
+            <td>yes</td>
+            <td>no</td>
+            <td>no</td>
+            <td>yes</td>
+        </tr>
+        <tr>
+            <td>false</td>
+            <td>false</td>
+            <td>yes</td>
+            <td>no</td>
+            <td>no</td>
+            <td>no</td>
+        </tr>
+    </tbody>
+</table>
+
## Upload the configuration file
- Upload the config to Azure. This will validate your file and report any problems.
diff --git a/docs/source/management/index.md b/docs/source/management/index.md
index e9f49a5733..234fe8136a 100644
--- a/docs/source/management/index.md
+++ b/docs/source/management/index.md
@@ -141,6 +141,43 @@ Tearing down the SHM also renders the SREs inaccessible to users and prevents th
All SREs associated with the SHM should be torn down before the SHM is torn down.
::::
+### Updating SREs
+
+SREs are modified by updating the configuration then running the deploy command.
+
+- The existing configuration for the SRE can be shown using the following:
+
+```{code} shell
+$ dsh config show YOUR_SRE_NAME
+```
+
+- If you do not have a local copy, you can write one with the `--file` option:
+
+```{code} shell
+$ dsh config show YOUR_SRE_NAME --file YOUR_SRE_NAME.yaml
+```
+
+- Edit the configuration file locally, and upload the new version:
+
+```{code} shell
+$ dsh config upload YOUR_SRE_NAME.yaml
+```
+
+- You will be shown the differences between the existing configuration and the new configuration and asked to confirm that they are correct.
+- Finally, deploy your SRE to apply any changes:
+
+```{code} shell
+$ dsh sre deploy YOUR_SRE_NAME
+```
+
+::::{admonition} Changing administrator IP addresses
+:class: warning
+The administrator IP addresses declared in the SRE configuration are used to create access rules for SRE infrastructure.
+Therefore, after an SRE has been deployed, some changes can only be made from IP addresses on that list.
+
+As a consequence, if you want to update the list of administrator IP addresses, for example to add a new administrator, you must do so from an IP address that is already allowed.
+::::
+
## Managing data ingress and egress
### Data Ingress
diff --git a/docs/source/roles/researcher/using_the_sre.md b/docs/source/roles/researcher/using_the_sre.md
index 51a7b60abf..1620a8f717 100644
--- a/docs/source/roles/researcher/using_the_sre.md
+++ b/docs/source/roles/researcher/using_the_sre.md
@@ -48,6 +48,13 @@ You can make the process as easy as possible by providing as much information as
For instance, describing in detail what a dataset contains and how it will be used will help speed up decision making.
:::
+## {{scissors}} Copy and paste
+
+It is always possible to use copy and paste as normal within an SRE workspace.
+However, the ability to copy and paste text to or from an SRE workspace depends on the specific configuration of the SRE.
+The {ref}`system manager <role_system_manager>` can configure the SRE workspaces to allow copying text from a workspace, pasting text into a workspace, both, or neither.
+Copy and paste of anything other than text to or from a workspace is not possible.
+
## {{books}} Maintaining an archive of the project
SREs are designed to be ephemeral and only deployed for as long as necessary.
diff --git a/pyproject.toml b/pyproject.toml
index d444befc3d..9304f1bd32 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -25,37 +25,37 @@ classifiers = [
license = { text = "BSD-3-Clause" }
dependencies = [
"appdirs==1.4.4",
- "azure-core==1.31.0",
+ "azure-core==1.32.0",
"azure-identity==1.19.0",
- "azure-keyvault-certificates==4.8.0",
- "azure-keyvault-keys==4.9.0",
- "azure-keyvault-secrets==4.8.0",
+ "azure-keyvault-certificates==4.9.0",
+ "azure-keyvault-keys==4.10.0",
+ "azure-keyvault-secrets==4.9.0",
"azure-mgmt-compute==33.0.0",
"azure-mgmt-containerinstance==10.1.0",
- "azure-mgmt-dns==8.1.0",
+ "azure-mgmt-dns==8.2.0",
"azure-mgmt-keyvault==10.3.1",
"azure-mgmt-msi==7.0.0",
"azure-mgmt-rdbms==10.1.0",
- "azure-mgmt-resource==23.1.1",
+ "azure-mgmt-resource==23.2.0",
"azure-mgmt-storage==21.2.1",
- "azure-storage-blob==12.23.1",
- "azure-storage-file-datalake==12.17.0",
- "azure-storage-file-share==12.19.0",
+ "azure-storage-blob==12.24.0",
+ "azure-storage-file-datalake==12.18.0",
+ "azure-storage-file-share==12.20.0",
"chevron==0.14.0",
- "cryptography==43.0.1",
+ "cryptography==43.0.3",
"fqdn==1.5.1",
"psycopg[binary]==3.1.19", # needed for installation on older MacOS versions
- "pulumi-azure-native==2.66.0",
- "pulumi-azuread==6.0.0",
- "pulumi-random==4.16.6",
- "pulumi==3.136.1",
+ "pulumi-azure-native==2.72.0",
+ "pulumi-azuread==6.0.1",
+ "pulumi-random==4.16.7",
+ "pulumi==3.139.0",
"pydantic==2.9.2",
- "pyjwt[crypto]==2.9.0",
+ "pyjwt[crypto]==2.10.0",
"pytz==2024.2",
"pyyaml==6.0.2",
- "rich==13.9.2",
- "simple-acme-dns==3.1.0",
- "typer==0.12.5",
+ "rich==13.9.4",
+ "simple-acme-dns==3.2.0",
+ "typer==0.13.0",
"websocket-client==1.8.0",
]
@@ -68,26 +68,26 @@ Source = "https://github.com/alan-turing-institute/data-safe-haven"
docs = [
"emoji==2.14.0",
"myst-parser==4.0.0",
- "pydata-sphinx-theme==0.15.4",
+ "pydata-sphinx-theme==0.16.0",
"sphinx-togglebutton==0.3.2",
"sphinx==8.1.3",
]
lint = [
- "ansible-dev-tools==24.9.0",
- "ansible==10.5.0",
+ "ansible-dev-tools==24.10.2",
+ "ansible==10.6.0",
"black==24.10.0",
- "mypy==1.11.2",
+ "mypy==1.13.0",
"pandas-stubs==2.2.3.241009",
"pydantic==2.9.2",
- "ruff==0.6.9",
+ "ruff==0.7.4",
"types-appdirs==1.4.3.5",
"types-chevron==0.14.2.20240310",
"types-pytz==2024.2.0.20241003",
"types-pyyaml==6.0.12.20240917",
- "types-requests==2.32.0.20240914",
+ "types-requests==2.32.0.20241016",
]
test = [
- "coverage==7.6.3",
+ "coverage==7.6.7",
"freezegun==1.5.1",
"pytest-mock==3.14.0",
"pytest==8.3.3",
diff --git a/tests/commands/conftest.py b/tests/commands/conftest.py
index dab10adb7b..d675398bfc 100644
--- a/tests/commands/conftest.py
+++ b/tests/commands/conftest.py
@@ -48,11 +48,6 @@ def mock_graph_api_get_application_by_name(mocker, request):
)
-@fixture
-def mock_graph_api_token(mocker):
- mocker.patch.object(GraphApi, "token", return_value="dummy-token")
-
-
@fixture
def mock_imperative_shm_deploy(mocker):
mocker.patch.object(
@@ -100,6 +95,15 @@ def mock_pulumi_config_from_remote(mocker, pulumi_config):
mocker.patch.object(DSHPulumiConfig, "from_remote", return_value=pulumi_config)
+@fixture
+def mock_pulumi_config_from_remote_fails(mocker):
+ mocker.patch.object(
+ DSHPulumiConfig,
+ "from_remote",
+ side_effect=DataSafeHavenAzureError("mock from_remote failure"),
+ )
+
+
@fixture
def mock_pulumi_config_from_remote_or_create(mocker, pulumi_config_empty):
mocker.patch.object(
@@ -119,6 +123,11 @@ def mock_pulumi_config_upload(mocker):
mocker.patch.object(DSHPulumiConfig, "upload", return_value=None)
+@fixture
+def mock_pulumi_config_remote_exists(mocker):
+ mocker.patch.object(DSHPulumiConfig, "remote_exists", return_value=True)
+
+
@fixture
def mock_shm_config_from_remote(mocker, shm_config):
mocker.patch.object(SHMConfig, "from_remote", return_value=shm_config)
diff --git a/tests/commands/test_config_sre.py b/tests/commands/test_config_sre.py
index 7460a908eb..263def9236 100644
--- a/tests/commands/test_config_sre.py
+++ b/tests/commands/test_config_sre.py
@@ -167,7 +167,7 @@ class TestUploadSRE:
def test_upload_new(
self, mocker, context, runner, sre_config_yaml, sre_config_file
):
- sre_name = "SandBox"
+ sre_name = "sandbox"
sre_filename = sre_config_name(sre_name)
mock_exists = mocker.patch.object(
SREConfig, "remote_exists", return_value=False
@@ -191,7 +191,7 @@ def test_upload_new(
def test_upload_no_changes(
self, mocker, context, runner, sre_config, sre_config_file
):
- sre_name = "SandBox"
+ sre_name = "sandbox"
sre_filename = sre_config_name(sre_name)
mock_exists = mocker.patch.object(SREConfig, "remote_exists", return_value=True)
mock_from_remote = mocker.patch.object(
@@ -249,7 +249,7 @@ def test_upload_changes(
def test_upload_changes_n(
self, mocker, context, runner, sre_config_alternate, sre_config_file
):
- sre_name = "SandBox"
+ sre_name = "sandbox"
sre_filename = sre_config_name(sre_name)
mock_exists = mocker.patch.object(SREConfig, "remote_exists", return_value=True)
mock_from_remote = mocker.patch.object(
@@ -287,7 +287,7 @@ def test_upload_file_does_not_exist(self, mocker, runner):
def test_upload_invalid_config(
self, mocker, runner, context, sre_config_file, sre_config_yaml
):
- sre_name = "SandBox"
+ sre_name = "sandbox"
sre_filename = sre_config_name(sre_name)
mock_exists = mocker.patch.object(SREConfig, "remote_exists", return_value=True)
@@ -310,7 +310,7 @@ def test_upload_invalid_config(
def test_upload_invalid_config_force(
self, mocker, runner, context, sre_config_file, sre_config_yaml
):
- sre_name = "SandBox"
+ sre_name = "sandbox"
sre_filename = sre_config_name(sre_name)
mocker.patch.object(
diff --git a/tests/commands/test_pulumi.py b/tests/commands/test_pulumi.py
index fefb4615fc..b55d2974ec 100644
--- a/tests/commands/test_pulumi.py
+++ b/tests/commands/test_pulumi.py
@@ -6,7 +6,6 @@ def test_run_sre(
self,
runner,
local_project_settings, # noqa: ARG002
- mock_graph_api_token, # noqa: ARG002
mock_install_plugins, # noqa: ARG002
mock_key_vault_key, # noqa: ARG002
mock_pulumi_config_no_key_from_remote, # noqa: ARG002
@@ -30,7 +29,6 @@ def test_run_sre_invalid_command(
self,
runner,
local_project_settings, # noqa: ARG002
- mock_graph_api_token, # noqa: ARG002
mock_install_plugins, # noqa: ARG002
mock_key_vault_key, # noqa: ARG002
mock_pulumi_config_no_key_from_remote, # noqa: ARG002
@@ -48,7 +46,6 @@ def test_run_sre_invalid_name(
self,
runner,
local_project_settings, # noqa: ARG002
- mock_graph_api_token, # noqa: ARG002
mock_install_plugins, # noqa: ARG002
mock_key_vault_key, # noqa: ARG002
mock_pulumi_config_no_key_from_remote, # noqa: ARG002
diff --git a/tests/commands/test_shm.py b/tests/commands/test_shm.py
index 8258d2a16a..e8f3919ed9 100644
--- a/tests/commands/test_shm.py
+++ b/tests/commands/test_shm.py
@@ -7,7 +7,6 @@ def test_infrastructure_deploy(
runner,
mock_imperative_shm_deploy_then_exit, # noqa: ARG002
mock_graph_api_add_custom_domain, # noqa: ARG002
- mock_graph_api_token, # noqa: ARG002
mock_shm_config_from_remote, # noqa: ARG002
mock_shm_config_remote_exists, # noqa: ARG002
mock_shm_config_upload, # noqa: ARG002
@@ -44,8 +43,9 @@ def test_teardown(
runner,
mock_imperative_shm_teardown_then_exit, # noqa: ARG002
mock_shm_config_from_remote, # noqa: ARG002
+ mock_shm_config_remote_exists, # noqa: ARG002
):
- result = runner.invoke(shm_command_group, ["teardown"])
+ result = runner.invoke(shm_command_group, ["teardown"], input="y")
assert result.exit_code == 1
assert "mock teardown" in result.stdout
@@ -63,9 +63,48 @@ def test_auth_failure(
self,
runner,
mock_azuresdk_get_credential_failure, # noqa: ARG002
+ mock_shm_config_remote_exists, # noqa: ARG002
):
result = runner.invoke(shm_command_group, ["teardown"])
assert result.exit_code == 1
assert "mock get_credential\n" in result.stdout
assert "mock get_credential error" in result.stdout
assert "Could not teardown Safe Haven Management environment." in result.stdout
+
+ def test_teardown_sres_exist(
+ self,
+ runner,
+ mock_azuresdk_get_subscription_name, # noqa: ARG002
+ mock_pulumi_config_from_remote, # noqa: ARG002
+ mock_pulumi_config_remote_exists, # noqa: ARG002
+ mock_shm_config_from_remote, # noqa: ARG002
+ mock_shm_config_remote_exists, # noqa: ARG002
+ ):
+ result = runner.invoke(shm_command_group, ["teardown"], input="y")
+ assert result.exit_code == 1
+ assert "Found deployed SREs" in result.stdout
+
+ def test_teardown_user_cancelled(
+ self,
+ runner,
+ mock_azuresdk_get_subscription_name, # noqa: ARG002
+ mock_pulumi_config_from_remote, # noqa: ARG002
+ mock_shm_config_from_remote, # noqa: ARG002
+ mock_shm_config_remote_exists, # noqa: ARG002
+ ):
+ result = runner.invoke(shm_command_group, ["teardown"], input="n")
+ assert result.exit_code == 0
+ assert "cancelled" in result.stdout
+
+ def test_teardown_no_pulumi_config(
+ self,
+ runner,
+ mock_azuresdk_get_subscription_name, # noqa: ARG002
+ mock_pulumi_config_from_remote_fails, # noqa: ARG002
+ mock_shm_config_from_remote, # noqa: ARG002
+ mock_imperative_shm_teardown_then_exit, # noqa: ARG002
+ mock_shm_config_remote_exists, # noqa: ARG002
+ ):
+ result = runner.invoke(shm_command_group, ["teardown"], input="y")
+ assert result.exit_code == 1
+ assert "mock teardown" in result.stdout
diff --git a/tests/commands/test_sre.py b/tests/commands/test_sre.py
index 6c4a13f545..a13518a878 100644
--- a/tests/commands/test_sre.py
+++ b/tests/commands/test_sre.py
@@ -13,7 +13,6 @@ def test_deploy(
self,
runner: CliRunner,
mock_azuresdk_get_subscription_name, # noqa: ARG002
- mock_graph_api_token, # noqa: ARG002
mock_contextmanager_assert_context, # noqa: ARG002
mock_ip_1_2_3_4, # noqa: ARG002
mock_pulumi_config_from_remote_or_create, # noqa: ARG002
@@ -34,7 +33,6 @@ def test_no_application(
runner: CliRunner,
mock_azuresdk_get_subscription_name, # noqa: ARG002
mock_contextmanager_assert_context, # noqa: ARG002
- mock_graph_api_token, # noqa: ARG002
mock_ip_1_2_3_4, # noqa: ARG002
mock_pulumi_config_from_remote_or_create, # noqa: ARG002
mock_shm_config_from_remote, # noqa: ARG002
@@ -56,7 +54,6 @@ def test_no_application_secret(
mocker: MockerFixture,
mock_azuresdk_get_subscription_name, # noqa: ARG002
mock_graph_api_get_application_by_name, # noqa: ARG002
- mock_graph_api_token, # noqa: ARG002
mock_ip_1_2_3_4, # noqa: ARG002
mock_pulumi_config_from_remote_or_create, # noqa: ARG002
mock_shm_config_from_remote, # noqa: ARG002
@@ -104,14 +101,12 @@ class TestTeardownSRE:
def test_teardown(
self,
runner: CliRunner,
- mock_graph_api_token, # noqa: ARG002
mock_ip_1_2_3_4, # noqa: ARG002
mock_pulumi_config_from_remote, # noqa: ARG002
- mock_shm_config_from_remote, # noqa: ARG002
mock_sre_config_from_remote, # noqa: ARG002
mock_sre_project_manager_teardown_then_exit, # noqa: ARG002
) -> None:
- result = runner.invoke(sre_command_group, ["teardown", "sandbox"])
+ result = runner.invoke(sre_command_group, ["teardown", "sandbox"], input="y")
assert result.exit_code == 1
assert "mock teardown" in result.stdout
@@ -142,3 +137,15 @@ def test_auth_failure(
assert result.exit_code == 1
assert "mock get_credential\n" in result.stdout
assert "mock get_credential error" in result.stdout
+
+ def test_teardown_cancelled(
+ self,
+ runner: CliRunner,
+ mock_ip_1_2_3_4, # noqa: ARG002
+ mock_pulumi_config_from_remote, # noqa: ARG002
+ mock_sre_config_from_remote, # noqa: ARG002
+ mock_sre_project_manager_teardown_then_exit, # noqa: ARG002
+ ) -> None:
+ result = runner.invoke(sre_command_group, ["teardown", "sandbox"], input="n")
+ assert result.exit_code == 0
+ assert "cancelled by user" in result.stdout
diff --git a/tests/config/test_config_sections.py b/tests/config/test_config_sections.py
index 6528b130fa..7d9a0ba873 100644
--- a/tests/config/test_config_sections.py
+++ b/tests/config/test_config_sections.py
@@ -170,6 +170,24 @@ def test_all_databases_must_be_unique(self) -> None:
databases=[DatabaseSystem.POSTGRESQL, DatabaseSystem.POSTGRESQL],
)
+ def test_data_provider_tag_internet(
+ self,
+ config_subsection_remote_desktop: ConfigSubsectionRemoteDesktopOpts,
+ config_subsection_storage_quota_gb: ConfigSubsectionStorageQuotaGB,
+ ):
+ sre_config = ConfigSectionSRE(
+ admin_email_address="admin@example.com",
+ remote_desktop=config_subsection_remote_desktop,
+ storage_quota_gb=config_subsection_storage_quota_gb,
+ data_provider_ip_addresses="Internet",
+ )
+ assert isinstance(sre_config.data_provider_ip_addresses, AzureServiceTag)
+ assert sre_config.data_provider_ip_addresses == "Internet"
+
+ def test_data_provider_tag_invalid(self):
+ with pytest.raises(ValueError, match="Input should be 'Internet'"):
+ ConfigSectionSRE(data_provider_ip_addresses="Not a tag")
+
def test_ip_overlap_admin(self):
with pytest.raises(ValueError, match="IP addresses must not overlap."):
ConfigSectionSRE(
diff --git a/tests/config/test_sre_config.py b/tests/config/test_sre_config.py
index 66bd50a40d..7ac6d61981 100644
--- a/tests/config/test_sre_config.py
+++ b/tests/config/test_sre_config.py
@@ -7,7 +7,6 @@
ConfigSectionDockerHub,
ConfigSectionSRE,
)
-from data_safe_haven.config.sre_config import sre_config_name
from data_safe_haven.exceptions import (
DataSafeHavenTypeError,
)
@@ -126,14 +125,5 @@ def test_upload(self, mocker, context, sre_config) -> None:
context.storage_container_name,
)
-
-@pytest.mark.parametrize(
- "value,expected",
- [
- (r"Test SRE", "sre-testsre.yaml"),
- (r"*a^b$c", "sre-abc.yaml"),
- (r";'@-", "sre-.yaml"),
- ],
-)
-def test_sre_config_name(value, expected):
- assert sre_config_name(value) == expected
+ def test_sre_config_yaml_name(self, sre_config: SREConfig) -> None:
+ assert sre_config.filename == "sre-sandbox.yaml"
diff --git a/tests/functions/test_strings.py b/tests/functions/test_strings.py
index 3e57965d98..7bd12d9490 100644
--- a/tests/functions/test_strings.py
+++ b/tests/functions/test_strings.py
@@ -4,7 +4,6 @@
from data_safe_haven.exceptions import DataSafeHavenValueError
from data_safe_haven.functions import (
get_key_vault_name,
- json_safe,
next_occurrence,
)
@@ -70,11 +69,3 @@ def test_invalid_timeformat(self):
)
def test_get_key_vault_name(value, expected):
assert get_key_vault_name(value) == expected
-
-
-@pytest.mark.parametrize(
- "value,expected",
- [(r"Test SRE", "testsre"), (r"%*aBc", "abc"), (r"MY_SRE", "mysre")],
-)
-def test_json_safe(value, expected):
- assert json_safe(value) == expected
diff --git a/tests/validators/test_validators.py b/tests/validators/test_validators.py
index 18d2fd31b5..c8447ab441 100644
--- a/tests/validators/test_validators.py
+++ b/tests/validators/test_validators.py
@@ -111,7 +111,7 @@ def test_ip_address(self, ip_address, output):
def test_ip_address_fail(self, ip_address):
with pytest.raises(
ValueError,
- match="Expected valid IPv4 address, for example '1.1.1.1', or 'Internet'.",
+ match="Expected valid IPv4 address, for example '1.1.1.1'.",
):
validators.ip_address(ip_address)