Compare commits
	
		
			32 Commits
		
	
	
		
			version/20
			...
			web/design
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
| 977e73b9d8 | |||
| 2ddb7e1e97 | |||
| 246423b2be | |||
| 457b61c5b4 | |||
| 25eefb7d55 | |||
| 50d2f69332 | |||
| 7d972ec711 | |||
| 854427e463 | |||
| be349e2e14 | |||
| bd0e81b8ad | |||
| f6afb59515 | |||
| dddde09be5 | |||
| 6d7fc94698 | |||
| 1dcf9108ad | |||
| 7bb6a3dfe6 | |||
| 9cc440eee1 | |||
| fe9e4526ac | |||
| 20b66f850c | |||
| 67b327414b | |||
| 5b8d86b5a9 | |||
| 67aed3e318 | |||
| 9809b94030 | |||
| e7527c551b | |||
| 36b10b434a | |||
| 831797b871 | |||
| 5cc2c0f45f | |||
| 32442766f4 | |||
| 75790909a8 | |||
| e0d5df89ca | |||
| f25a9c624e | |||
| 914993a788 | |||
| 89dad07a66 | 
| @ -1,16 +1,16 @@ | ||||
| [bumpversion] | ||||
| current_version = 2025.2.0 | ||||
| current_version = 2024.12.2 | ||||
| tag = True | ||||
| commit = True | ||||
| parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))? | ||||
| serialize =  | ||||
| serialize = | ||||
| 	{major}.{minor}.{patch}-{rc_t}{rc_n} | ||||
| 	{major}.{minor}.{patch} | ||||
| message = release: {new_version} | ||||
| tag_name = version/{new_version} | ||||
|  | ||||
| [bumpversion:part:rc_t] | ||||
| values =  | ||||
| values = | ||||
| 	rc | ||||
| 	final | ||||
| optional_value = final | ||||
|  | ||||
| @ -40,7 +40,7 @@ jobs: | ||||
|       attestations: write | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - uses: docker/setup-qemu-action@v3.4.0 | ||||
|       - uses: docker/setup-qemu-action@v3.3.0 | ||||
|       - uses: docker/setup-buildx-action@v3 | ||||
|       - name: prepare variables | ||||
|         uses: ./.github/actions/docker-push-variables | ||||
| @ -77,7 +77,7 @@ jobs: | ||||
|         id: push | ||||
|         with: | ||||
|           context: . | ||||
|           push: ${{ steps.ev.outputs.shouldPush == 'true' }} | ||||
|           push: true | ||||
|           secrets: | | ||||
|             GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }} | ||||
|             GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }} | ||||
| @ -89,7 +89,6 @@ jobs: | ||||
|           cache-to: ${{ steps.ev.outputs.cacheTo }} | ||||
|       - uses: actions/attest-build-provenance@v2 | ||||
|         id: attest | ||||
|         if: ${{ steps.ev.outputs.shouldPush == 'true' }} | ||||
|         with: | ||||
|           subject-name: ${{ steps.ev.outputs.attestImageNames }} | ||||
|           subject-digest: ${{ steps.push.outputs.digest }} | ||||
|  | ||||
| @ -46,7 +46,6 @@ jobs: | ||||
|       - build-server-arm64 | ||||
|     outputs: | ||||
|       tags: ${{ steps.ev.outputs.imageTagsJSON }} | ||||
|       shouldPush: ${{ steps.ev.outputs.shouldPush }} | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - name: prepare variables | ||||
| @ -58,7 +57,6 @@ jobs: | ||||
|           image-name: ${{ inputs.image_name }} | ||||
|   merge-server: | ||||
|     runs-on: ubuntu-latest | ||||
|     if: ${{ needs.get-tags.outputs.shouldPush == 'true' }} | ||||
|     needs: | ||||
|       - get-tags | ||||
|       - build-server-amd64 | ||||
|  | ||||
							
								
								
									
										28
									
								
								.github/workflows/ci-main-daily.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										28
									
								
								.github/workflows/ci-main-daily.yml
									
									
									
									
										vendored
									
									
								
							| @ -1,28 +0,0 @@ | ||||
| --- | ||||
| name: authentik-ci-main-daily | ||||
|  | ||||
| on: | ||||
|   workflow_dispatch: | ||||
|   schedule: | ||||
|     # Every night at 3am | ||||
|     - cron: "0 3 * * *" | ||||
|  | ||||
| jobs: | ||||
|   test-container: | ||||
|     runs-on: ubuntu-latest | ||||
|     strategy: | ||||
|       fail-fast: false | ||||
|       matrix: | ||||
|         version: | ||||
|           - docs | ||||
|           - version-2024-12 | ||||
|           - version-2024-10 | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - run: | | ||||
|           current="$(pwd)" | ||||
|           dir="/tmp/authentik/${{ matrix.version }}" | ||||
|           mkdir -p $dir | ||||
|           cd $dir | ||||
|           wget https://${{ matrix.version }}.goauthentik.io/docker-compose.yml | ||||
|           ${current}/scripts/test_docker.sh | ||||
							
								
								
									
										31
									
								
								.github/workflows/ci-main.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										31
									
								
								.github/workflows/ci-main.yml
									
									
									
									
										vendored
									
									
								
							| @ -43,26 +43,15 @@ jobs: | ||||
|         uses: ./.github/actions/setup | ||||
|       - name: run migrations | ||||
|         run: poetry run python -m lifecycle.migrate | ||||
|   test-make-seed: | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|       - id: seed | ||||
|         run: | | ||||
|           echo "seed=$(printf "%d\n" "0x$(openssl rand -hex 4)")" >> "$GITHUB_OUTPUT" | ||||
|     outputs: | ||||
|       seed: ${{ steps.seed.outputs.seed }} | ||||
|   test-migrations-from-stable: | ||||
|     name: test-migrations-from-stable - PostgreSQL ${{ matrix.psql }} - Run ${{ matrix.run_id }}/5 | ||||
|     name: test-migrations-from-stable - PostgreSQL ${{ matrix.psql }} | ||||
|     runs-on: ubuntu-latest | ||||
|     timeout-minutes: 20 | ||||
|     needs: test-make-seed | ||||
|     strategy: | ||||
|       fail-fast: false | ||||
|       matrix: | ||||
|         psql: | ||||
|           - 15-alpine | ||||
|           - 16-alpine | ||||
|         run_id: [1, 2, 3, 4, 5] | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|         with: | ||||
| @ -104,23 +93,18 @@ jobs: | ||||
|         env: | ||||
|           # Test in the main database that we just migrated from the previous stable version | ||||
|           AUTHENTIK_POSTGRESQL__TEST__NAME: authentik | ||||
|           CI_TEST_SEED: ${{ needs.test-make-seed.outputs.seed }} | ||||
|           CI_RUN_ID: ${{ matrix.run_id }} | ||||
|           CI_TOTAL_RUNS: "5" | ||||
|         run: | | ||||
|           poetry run make ci-test | ||||
|           poetry run make test | ||||
|   test-unittest: | ||||
|     name: test-unittest - PostgreSQL ${{ matrix.psql }} - Run ${{ matrix.run_id }}/5 | ||||
|     name: test-unittest - PostgreSQL ${{ matrix.psql }} | ||||
|     runs-on: ubuntu-latest | ||||
|     timeout-minutes: 20 | ||||
|     needs: test-make-seed | ||||
|     timeout-minutes: 30 | ||||
|     strategy: | ||||
|       fail-fast: false | ||||
|       matrix: | ||||
|         psql: | ||||
|           - 15-alpine | ||||
|           - 16-alpine | ||||
|         run_id: [1, 2, 3, 4, 5] | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - name: Setup authentik env | ||||
| @ -128,12 +112,9 @@ jobs: | ||||
|         with: | ||||
|           postgresql_version: ${{ matrix.psql }} | ||||
|       - name: run unittest | ||||
|         env: | ||||
|           CI_TEST_SEED: ${{ needs.test-make-seed.outputs.seed }} | ||||
|           CI_RUN_ID: ${{ matrix.run_id }} | ||||
|           CI_TOTAL_RUNS: "5" | ||||
|         run: | | ||||
|           poetry run make ci-test | ||||
|           poetry run make test | ||||
|           poetry run coverage xml | ||||
|       - if: ${{ always() }} | ||||
|         uses: codecov/codecov-action@v5 | ||||
|         with: | ||||
|  | ||||
							
								
								
									
										2
									
								
								.github/workflows/ci-outpost.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										2
									
								
								.github/workflows/ci-outpost.yml
									
									
									
									
										vendored
									
									
								
							| @ -82,7 +82,7 @@ jobs: | ||||
|         with: | ||||
|           ref: ${{ github.event.pull_request.head.sha }} | ||||
|       - name: Set up QEMU | ||||
|         uses: docker/setup-qemu-action@v3.4.0 | ||||
|         uses: docker/setup-qemu-action@v3.3.0 | ||||
|       - name: Set up Docker Buildx | ||||
|         uses: docker/setup-buildx-action@v3 | ||||
|       - name: prepare variables | ||||
|  | ||||
							
								
								
									
										10
									
								
								.github/workflows/release-publish.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										10
									
								
								.github/workflows/release-publish.yml
									
									
									
									
										vendored
									
									
								
							| @ -9,17 +9,9 @@ jobs: | ||||
|   build-server: | ||||
|     uses: ./.github/workflows/_reusable-docker-build.yaml | ||||
|     secrets: inherit | ||||
|     permissions: | ||||
|       # Needed to upload container images to ghcr.io | ||||
|       packages: write | ||||
|       # Needed for attestation | ||||
|       id-token: write | ||||
|       attestations: write | ||||
|     with: | ||||
|       image_name: ghcr.io/goauthentik/server,beryju/authentik | ||||
|       release: true | ||||
|       registry_dockerhub: true | ||||
|       registry_ghcr: true | ||||
|   build-outpost: | ||||
|     runs-on: ubuntu-latest | ||||
|     permissions: | ||||
| @ -42,7 +34,7 @@ jobs: | ||||
|         with: | ||||
|           go-version-file: "go.mod" | ||||
|       - name: Set up QEMU | ||||
|         uses: docker/setup-qemu-action@v3.4.0 | ||||
|         uses: docker/setup-qemu-action@v3.3.0 | ||||
|       - name: Set up Docker Buildx | ||||
|         uses: docker/setup-buildx-action@v3 | ||||
|       - name: prepare variables | ||||
|  | ||||
							
								
								
									
										11
									
								
								.github/workflows/release-tag.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										11
									
								
								.github/workflows/release-tag.yml
									
									
									
									
										vendored
									
									
								
							| @ -14,7 +14,16 @@ jobs: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - name: Pre-release test | ||||
|         run: | | ||||
|           make test-docker | ||||
|           echo "PG_PASS=$(openssl rand 32 | base64 -w 0)" >> .env | ||||
|           echo "AUTHENTIK_SECRET_KEY=$(openssl rand 32 | base64 -w 0)" >> .env | ||||
|           docker buildx install | ||||
|           mkdir -p ./gen-ts-api | ||||
|           docker build -t testing:latest . | ||||
|           echo "AUTHENTIK_IMAGE=testing" >> .env | ||||
|           echo "AUTHENTIK_TAG=latest" >> .env | ||||
|           docker compose up --no-start | ||||
|           docker compose start postgresql redis | ||||
|           docker compose run -u root server test-all | ||||
|       - id: generate_token | ||||
|         uses: tibdex/github-app-token@v2 | ||||
|         with: | ||||
|  | ||||
							
								
								
									
										6
									
								
								.github/workflows/repo-stale.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										6
									
								
								.github/workflows/repo-stale.yml
									
									
									
									
										vendored
									
									
								
							| @ -1,8 +1,8 @@ | ||||
| name: "authentik-repo-stale" | ||||
| name: 'authentik-repo-stale' | ||||
|  | ||||
| on: | ||||
|   schedule: | ||||
|     - cron: "30 1 * * *" | ||||
|     - cron: '30 1 * * *' | ||||
|   workflow_dispatch: | ||||
|  | ||||
| permissions: | ||||
| @ -25,7 +25,7 @@ jobs: | ||||
|           days-before-stale: 60 | ||||
|           days-before-close: 7 | ||||
|           exempt-issue-labels: pinned,security,pr_wanted,enhancement,bug/confirmed,enhancement/confirmed,question,status/reviewing | ||||
|           stale-issue-label: status/stale | ||||
|           stale-issue-label: wontfix | ||||
|           stale-issue-message: > | ||||
|             This issue has been automatically marked as stale because it has not had | ||||
|             recent activity. It will be closed if no further activity occurs. Thank you | ||||
|  | ||||
							
								
								
									
										3
									
								
								.gitignore
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										3
									
								
								.gitignore
									
									
									
									
										vendored
									
									
								
							| @ -209,6 +209,3 @@ source_docs/ | ||||
|  | ||||
| ### Golang ### | ||||
| /vendor/ | ||||
|  | ||||
| ### Docker ### | ||||
| docker-compose.override.yml | ||||
|  | ||||
							
								
								
									
										7
									
								
								.vscode/extensions.json
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										7
									
								
								.vscode/extensions.json
									
									
									
									
										vendored
									
									
								
							| @ -2,7 +2,6 @@ | ||||
|     "recommendations": [ | ||||
|         "bashmish.es6-string-css", | ||||
|         "bpruitt-goddard.mermaid-markdown-syntax-highlighting", | ||||
|         "charliermarsh.ruff", | ||||
|         "dbaeumer.vscode-eslint", | ||||
|         "EditorConfig.EditorConfig", | ||||
|         "esbenp.prettier-vscode", | ||||
| @ -11,12 +10,12 @@ | ||||
|         "Gruntfuggly.todo-tree", | ||||
|         "mechatroner.rainbow-csv", | ||||
|         "ms-python.black-formatter", | ||||
|         "ms-python.black-formatter", | ||||
|         "ms-python.debugpy", | ||||
|         "charliermarsh.ruff", | ||||
|         "ms-python.python", | ||||
|         "ms-python.vscode-pylance", | ||||
|         "ms-python.black-formatter", | ||||
|         "redhat.vscode-yaml", | ||||
|         "Tobermory.es6-string-html", | ||||
|         "unifiedjs.vscode-mdx", | ||||
|         "unifiedjs.vscode-mdx" | ||||
|     ] | ||||
| } | ||||
|  | ||||
							
								
								
									
										66
									
								
								.vscode/launch.json
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										66
									
								
								.vscode/launch.json
									
									
									
									
										vendored
									
									
								
							| @ -2,76 +2,26 @@ | ||||
|     "version": "0.2.0", | ||||
|     "configurations": [ | ||||
|         { | ||||
|             "name": "Debug: Attach Server Core", | ||||
|             "type": "debugpy", | ||||
|             "name": "Python: PDB attach Server", | ||||
|             "type": "python", | ||||
|             "request": "attach", | ||||
|             "connect": { | ||||
|                 "host": "localhost", | ||||
|                 "port": 9901 | ||||
|                 "port": 6800 | ||||
|             }, | ||||
|             "pathMappings": [ | ||||
|                 { | ||||
|                     "localRoot": "${workspaceFolder}", | ||||
|                     "remoteRoot": "." | ||||
|                 } | ||||
|             ], | ||||
|             "justMyCode": true, | ||||
|             "django": true | ||||
|         }, | ||||
|         { | ||||
|             "name": "Debug: Attach Worker", | ||||
|             "type": "debugpy", | ||||
|             "name": "Python: PDB attach Worker", | ||||
|             "type": "python", | ||||
|             "request": "attach", | ||||
|             "connect": { | ||||
|                 "host": "localhost", | ||||
|                 "port": 9901 | ||||
|                 "port": 6900 | ||||
|             }, | ||||
|             "pathMappings": [ | ||||
|                 { | ||||
|                     "localRoot": "${workspaceFolder}", | ||||
|                     "remoteRoot": "." | ||||
|                 } | ||||
|             ], | ||||
|             "justMyCode": true, | ||||
|             "django": true | ||||
|         }, | ||||
|         { | ||||
|             "name": "Debug: Start Server Router", | ||||
|             "type": "go", | ||||
|             "request": "launch", | ||||
|             "mode": "auto", | ||||
|             "program": "${workspaceFolder}/cmd/server", | ||||
|             "cwd": "${workspaceFolder}" | ||||
|         }, | ||||
|         { | ||||
|             "name": "Debug: Start LDAP Outpost", | ||||
|             "type": "go", | ||||
|             "request": "launch", | ||||
|             "mode": "auto", | ||||
|             "program": "${workspaceFolder}/cmd/ldap", | ||||
|             "cwd": "${workspaceFolder}" | ||||
|         }, | ||||
|         { | ||||
|             "name": "Debug: Start Proxy Outpost", | ||||
|             "type": "go", | ||||
|             "request": "launch", | ||||
|             "mode": "auto", | ||||
|             "program": "${workspaceFolder}/cmd/proxy", | ||||
|             "cwd": "${workspaceFolder}" | ||||
|         }, | ||||
|         { | ||||
|             "name": "Debug: Start RAC Outpost", | ||||
|             "type": "go", | ||||
|             "request": "launch", | ||||
|             "mode": "auto", | ||||
|             "program": "${workspaceFolder}/cmd/rac", | ||||
|             "cwd": "${workspaceFolder}" | ||||
|         }, | ||||
|         { | ||||
|             "name": "Debug: Start Radius Outpost", | ||||
|             "type": "go", | ||||
|             "request": "launch", | ||||
|             "mode": "auto", | ||||
|             "program": "${workspaceFolder}/cmd/radius", | ||||
|             "cwd": "${workspaceFolder}" | ||||
|         } | ||||
|     ] | ||||
| } | ||||
|  | ||||
| @ -94,7 +94,7 @@ RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \ | ||||
|     /bin/sh -c "/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0" | ||||
|  | ||||
| # Stage 5: Python dependencies | ||||
| FROM ghcr.io/goauthentik/fips-python:3.12.8-slim-bookworm-fips AS python-deps | ||||
| FROM ghcr.io/goauthentik/fips-python:3.12.7-slim-bookworm-fips AS python-deps | ||||
|  | ||||
| ARG TARGETARCH | ||||
| ARG TARGETVARIANT | ||||
| @ -139,7 +139,7 @@ RUN --mount=type=bind,target=./pyproject.toml,src=./pyproject.toml \ | ||||
|     poetry install --only=main --no-ansi --no-interaction --no-root" | ||||
|  | ||||
| # Stage 6: Run | ||||
| FROM ghcr.io/goauthentik/fips-python:3.12.8-slim-bookworm-fips AS final-image | ||||
| FROM ghcr.io/goauthentik/fips-python:3.12.7-slim-bookworm-fips AS final-image | ||||
|  | ||||
| ARG VERSION | ||||
| ARG GIT_BUILD_HASH | ||||
|  | ||||
							
								
								
									
										28
									
								
								Makefile
									
									
									
									
									
								
							
							
						
						
									
										28
									
								
								Makefile
									
									
									
									
									
								
							| @ -6,8 +6,6 @@ UID = $(shell id -u) | ||||
| GID = $(shell id -g) | ||||
| NPM_VERSION = $(shell python -m scripts.npm_version) | ||||
| PY_SOURCES = authentik tests scripts lifecycle .github | ||||
| GO_SOURCES = cmd internal | ||||
| WEB_SOURCES = web/src web/packages | ||||
| DOCKER_IMAGE ?= "authentik:test" | ||||
|  | ||||
| GEN_API_TS = "gen-ts-api" | ||||
| @ -22,11 +20,10 @@ CODESPELL_ARGS = -D - -D .github/codespell-dictionary.txt \ | ||||
| 		-I .github/codespell-words.txt \ | ||||
| 		-S 'web/src/locales/**' \ | ||||
| 		-S 'website/docs/developer-docs/api/reference/**' \ | ||||
| 		-S '**/node_modules/**' \ | ||||
| 		-S '**/dist/**' \ | ||||
| 		$(PY_SOURCES) \ | ||||
| 		$(GO_SOURCES) \ | ||||
| 		$(WEB_SOURCES) \ | ||||
| 		authentik \ | ||||
| 		internal \ | ||||
| 		cmd \ | ||||
| 		web/src \ | ||||
| 		website/src \ | ||||
| 		website/blog \ | ||||
| 		website/docs \ | ||||
| @ -48,6 +45,15 @@ help:  ## Show this help | ||||
| go-test: | ||||
| 	go test -timeout 0 -v -race -cover ./... | ||||
|  | ||||
| test-docker:  ## Run all tests in a docker-compose | ||||
| 	echo "PG_PASS=$(shell openssl rand 32 | base64 -w 0)" >> .env | ||||
| 	echo "AUTHENTIK_SECRET_KEY=$(shell openssl rand 32 | base64 -w 0)" >> .env | ||||
| 	docker compose pull -q | ||||
| 	docker compose up --no-start | ||||
| 	docker compose start postgresql redis | ||||
| 	docker compose run -u root server test-all | ||||
| 	rm -f .env | ||||
|  | ||||
| test: ## Run the server tests and produce a coverage report (locally) | ||||
| 	coverage run manage.py test --keepdb authentik | ||||
| 	coverage html | ||||
| @ -257,9 +263,6 @@ docker:  ## Build a docker image of the current source tree | ||||
| 	mkdir -p ${GEN_API_TS} | ||||
| 	DOCKER_BUILDKIT=1 docker build . --progress plain --tag ${DOCKER_IMAGE} | ||||
|  | ||||
| test-docker: | ||||
| 	BUILD=true ./scripts/test_docker.sh | ||||
|  | ||||
| ######################### | ||||
| ## CI | ||||
| ######################### | ||||
| @ -284,8 +287,3 @@ ci-bandit: ci--meta-debug | ||||
|  | ||||
| ci-pending-migrations: ci--meta-debug | ||||
| 	ak makemigrations --check | ||||
|  | ||||
| ci-test: ci--meta-debug | ||||
| 	coverage run manage.py test --keepdb --randomly-seed ${CI_TEST_SEED} authentik | ||||
| 	coverage report | ||||
| 	coverage xml | ||||
|  | ||||
| @ -2,7 +2,7 @@ | ||||
|  | ||||
| from os import environ | ||||
|  | ||||
| __version__ = "2025.2.0" | ||||
| __version__ = "2024.12.2" | ||||
| ENV_GIT_HASH_KEY = "GIT_BUILD_HASH" | ||||
|  | ||||
|  | ||||
|  | ||||
| @ -50,7 +50,7 @@ from authentik.enterprise.providers.microsoft_entra.models import ( | ||||
|     MicrosoftEntraProviderGroup, | ||||
|     MicrosoftEntraProviderUser, | ||||
| ) | ||||
| from authentik.enterprise.providers.ssf.models import StreamEvent | ||||
| from authentik.enterprise.providers.rac.models import ConnectionToken | ||||
| from authentik.enterprise.stages.authenticator_endpoint_gdtc.models import ( | ||||
|     EndpointDevice, | ||||
|     EndpointDeviceConnection, | ||||
| @ -71,7 +71,6 @@ from authentik.providers.oauth2.models import ( | ||||
|     DeviceToken, | ||||
|     RefreshToken, | ||||
| ) | ||||
| from authentik.providers.rac.models import ConnectionToken | ||||
| from authentik.providers.scim.models import SCIMProviderGroup, SCIMProviderUser | ||||
| from authentik.rbac.models import Role | ||||
| from authentik.sources.scim.models import SCIMSourceGroup, SCIMSourceUser | ||||
| @ -132,7 +131,6 @@ def excluded_models() -> list[type[Model]]: | ||||
|         EndpointDevice, | ||||
|         EndpointDeviceConnection, | ||||
|         DeviceToken, | ||||
|         StreamEvent, | ||||
|     ) | ||||
|  | ||||
|  | ||||
|  | ||||
| @ -3,7 +3,6 @@ | ||||
| from django.utils.translation import gettext_lazy as _ | ||||
| from drf_spectacular.types import OpenApiTypes | ||||
| from drf_spectacular.utils import OpenApiParameter, extend_schema | ||||
| from guardian.shortcuts import get_objects_for_user | ||||
| from rest_framework.fields import ( | ||||
|     BooleanField, | ||||
|     CharField, | ||||
| @ -17,6 +16,7 @@ from rest_framework.viewsets import ViewSet | ||||
|  | ||||
| from authentik.core.api.utils import MetaNameSerializer | ||||
| from authentik.enterprise.stages.authenticator_endpoint_gdtc.models import EndpointDevice | ||||
| from authentik.rbac.decorators import permission_required | ||||
| from authentik.stages.authenticator import device_classes, devices_for_user | ||||
| from authentik.stages.authenticator.models import Device | ||||
| from authentik.stages.authenticator_webauthn.models import WebAuthnDevice | ||||
| @ -73,9 +73,7 @@ class AdminDeviceViewSet(ViewSet): | ||||
|     def get_devices(self, **kwargs): | ||||
|         """Get all devices in all child classes""" | ||||
|         for model in device_classes(): | ||||
|             device_set = get_objects_for_user( | ||||
|                 self.request.user, f"{model._meta.app_label}.view_{model._meta.model_name}", model | ||||
|             ).filter(**kwargs) | ||||
|             device_set = model.objects.filter(**kwargs) | ||||
|             yield from device_set | ||||
|  | ||||
|     @extend_schema( | ||||
| @ -88,6 +86,10 @@ class AdminDeviceViewSet(ViewSet): | ||||
|         ], | ||||
|         responses={200: DeviceSerializer(many=True)}, | ||||
|     ) | ||||
|     @permission_required( | ||||
|         None, | ||||
|         [f"{model._meta.app_label}.view_{model._meta.model_name}" for model in device_classes()], | ||||
|     ) | ||||
|     def list(self, request: Request) -> Response: | ||||
|         """Get all devices for current user""" | ||||
|         kwargs = {} | ||||
|  | ||||
| @ -4,7 +4,6 @@ from json import loads | ||||
|  | ||||
| from django.db.models import Prefetch | ||||
| from django.http import Http404 | ||||
| from django.utils.translation import gettext as _ | ||||
| from django_filters.filters import CharFilter, ModelMultipleChoiceFilter | ||||
| from django_filters.filterset import FilterSet | ||||
| from drf_spectacular.utils import ( | ||||
| @ -82,37 +81,9 @@ class GroupSerializer(ModelSerializer): | ||||
|         if not self.instance or not parent: | ||||
|             return parent | ||||
|         if str(parent.group_uuid) == str(self.instance.group_uuid): | ||||
|             raise ValidationError(_("Cannot set group as parent of itself.")) | ||||
|             raise ValidationError("Cannot set group as parent of itself.") | ||||
|         return parent | ||||
|  | ||||
|     def validate_is_superuser(self, superuser: bool): | ||||
|         """Ensure that the user creating this group has permissions to set the superuser flag""" | ||||
|         request: Request = self.context.get("request", None) | ||||
|         if not request: | ||||
|             return superuser | ||||
|         # If we're updating an instance, and the state hasn't changed, we don't need to check perms | ||||
|         if self.instance and superuser == self.instance.is_superuser: | ||||
|             return superuser | ||||
|         user: User = request.user | ||||
|         perm = ( | ||||
|             "authentik_core.enable_group_superuser" | ||||
|             if superuser | ||||
|             else "authentik_core.disable_group_superuser" | ||||
|         ) | ||||
|         has_perm = user.has_perm(perm) | ||||
|         if self.instance and not has_perm: | ||||
|             has_perm = user.has_perm(perm, self.instance) | ||||
|         if not has_perm: | ||||
|             raise ValidationError( | ||||
|                 _( | ||||
|                     ( | ||||
|                         "User does not have permission to set " | ||||
|                         "superuser status to {superuser_status}." | ||||
|                     ).format_map({"superuser_status": superuser}) | ||||
|                 ) | ||||
|             ) | ||||
|         return superuser | ||||
|  | ||||
|     class Meta: | ||||
|         model = Group | ||||
|         fields = [ | ||||
|  | ||||
| @ -85,7 +85,7 @@ class SourceViewSet( | ||||
|     serializer_class = SourceSerializer | ||||
|     lookup_field = "slug" | ||||
|     search_fields = ["slug", "name"] | ||||
|     filterset_fields = ["slug", "name", "managed", "pbm_uuid"] | ||||
|     filterset_fields = ["slug", "name", "managed"] | ||||
|  | ||||
|     def get_queryset(self):  # pragma: no cover | ||||
|         return Source.objects.select_subclasses() | ||||
|  | ||||
| @ -236,11 +236,9 @@ class UserSerializer(ModelSerializer): | ||||
|             "path", | ||||
|             "type", | ||||
|             "uuid", | ||||
|             "password_change_date", | ||||
|         ] | ||||
|         extra_kwargs = { | ||||
|             "name": {"allow_blank": True}, | ||||
|             "password_change_date": {"read_only": True}, | ||||
|         } | ||||
|  | ||||
|  | ||||
|  | ||||
| @ -5,7 +5,6 @@ from typing import TextIO | ||||
| from daphne.management.commands.runserver import Command as RunServer | ||||
| from daphne.server import Server | ||||
|  | ||||
| from authentik.lib.debug import start_debug_server | ||||
| from authentik.root.signals import post_startup, pre_startup, startup | ||||
|  | ||||
|  | ||||
| @ -14,7 +13,6 @@ class SignalServer(Server): | ||||
|  | ||||
|     def __init__(self, *args, **kwargs): | ||||
|         super().__init__(*args, **kwargs) | ||||
|         start_debug_server() | ||||
|  | ||||
|         def ready_callable(): | ||||
|             pre_startup.send(sender=self) | ||||
|  | ||||
| @ -9,7 +9,6 @@ from django.db import close_old_connections | ||||
| from structlog.stdlib import get_logger | ||||
|  | ||||
| from authentik.lib.config import CONFIG | ||||
| from authentik.lib.debug import start_debug_server | ||||
| from authentik.root.celery import CELERY_APP | ||||
|  | ||||
| LOGGER = get_logger() | ||||
| @ -29,7 +28,10 @@ class Command(BaseCommand): | ||||
|     def handle(self, **options): | ||||
|         LOGGER.debug("Celery options", **options) | ||||
|         close_old_connections() | ||||
|         start_debug_server() | ||||
|         if CONFIG.get_bool("remote_debug"): | ||||
|             import debugpy | ||||
|  | ||||
|             debugpy.listen(("0.0.0.0", 6900))  # nosec | ||||
|         worker: Worker = CELERY_APP.Worker( | ||||
|             no_color=False, | ||||
|             quiet=True, | ||||
|  | ||||
| @ -1,26 +0,0 @@ | ||||
| # Generated by Django 5.0.11 on 2025-01-30 23:55 | ||||
|  | ||||
| from django.db import migrations | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("authentik_core", "0042_authenticatedsession_authentik_c_expires_08251d_idx_and_more"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AlterModelOptions( | ||||
|             name="group", | ||||
|             options={ | ||||
|                 "permissions": [ | ||||
|                     ("add_user_to_group", "Add user to group"), | ||||
|                     ("remove_user_from_group", "Remove user from group"), | ||||
|                     ("enable_group_superuser", "Enable superuser status"), | ||||
|                     ("disable_group_superuser", "Disable superuser status"), | ||||
|                 ], | ||||
|                 "verbose_name": "Group", | ||||
|                 "verbose_name_plural": "Groups", | ||||
|             }, | ||||
|         ), | ||||
|     ] | ||||
| @ -204,8 +204,6 @@ class Group(SerializerModel, AttributesMixin): | ||||
|         permissions = [ | ||||
|             ("add_user_to_group", _("Add user to group")), | ||||
|             ("remove_user_from_group", _("Remove user from group")), | ||||
|             ("enable_group_superuser", _("Enable superuser status")), | ||||
|             ("disable_group_superuser", _("Disable superuser status")), | ||||
|         ] | ||||
|  | ||||
|     def __str__(self): | ||||
| @ -601,14 +599,6 @@ class Application(SerializerModel, PolicyBindingModel): | ||||
|             return None | ||||
|         return candidates[-1] | ||||
|  | ||||
|     def backchannel_provider_for[T: Provider](self, provider_type: type[T], **kwargs) -> T | None: | ||||
|         """Get Backchannel provider for a specific type""" | ||||
|         providers = self.backchannel_providers.filter( | ||||
|             **{f"{provider_type._meta.model_name}__isnull": False}, | ||||
|             **kwargs, | ||||
|         ) | ||||
|         return getattr(providers.first(), provider_type._meta.model_name) | ||||
|  | ||||
|     def __str__(self): | ||||
|         return str(self.name) | ||||
|  | ||||
|  | ||||
| @ -35,7 +35,8 @@ from authentik.flows.planner import ( | ||||
|     FlowPlanner, | ||||
| ) | ||||
| from authentik.flows.stage import StageView | ||||
| from authentik.flows.views.executor import NEXT_ARG_NAME, SESSION_KEY_GET | ||||
| from authentik.flows.views.executor import NEXT_ARG_NAME, SESSION_KEY_GET, SESSION_KEY_PLAN | ||||
| from authentik.lib.utils.urls import redirect_with_qs | ||||
| from authentik.lib.views import bad_request_message | ||||
| from authentik.policies.denied import AccessDeniedResponse | ||||
| from authentik.policies.utils import delete_none_values | ||||
| @ -46,9 +47,8 @@ from authentik.stages.user_write.stage import PLAN_CONTEXT_USER_PATH | ||||
|  | ||||
| LOGGER = get_logger() | ||||
|  | ||||
| PLAN_CONTEXT_SOURCE_GROUPS = "source_groups" | ||||
| SESSION_KEY_SOURCE_FLOW_STAGES = "authentik/flows/source_flow_stages" | ||||
| SESSION_KEY_OVERRIDE_FLOW_TOKEN = "authentik/flows/source_override_flow_token"  # nosec | ||||
| PLAN_CONTEXT_SOURCE_GROUPS = "source_groups" | ||||
|  | ||||
|  | ||||
| class MessageStage(StageView): | ||||
| @ -219,28 +219,28 @@ class SourceFlowManager: | ||||
|             } | ||||
|         ) | ||||
|         flow_context.update(self.policy_context) | ||||
|         if SESSION_KEY_OVERRIDE_FLOW_TOKEN in self.request.session: | ||||
|             token: FlowToken = self.request.session.get(SESSION_KEY_OVERRIDE_FLOW_TOKEN) | ||||
|             self._logger.info("Replacing source flow with overridden flow", flow=token.flow.slug) | ||||
|             plan = token.plan | ||||
|             plan.context[PLAN_CONTEXT_IS_RESTORED] = token | ||||
|             plan.context.update(flow_context) | ||||
|             for stage in self.get_stages_to_append(flow): | ||||
|                 plan.append_stage(stage) | ||||
|             if stages: | ||||
|                 for stage in stages: | ||||
|                     plan.append_stage(stage) | ||||
|             self.request.session[SESSION_KEY_PLAN] = plan | ||||
|             flow_slug = token.flow.slug | ||||
|             token.delete() | ||||
|             return redirect_with_qs( | ||||
|                 "authentik_core:if-flow", | ||||
|                 self.request.GET, | ||||
|                 flow_slug=flow_slug, | ||||
|             ) | ||||
|         flow_context.setdefault(PLAN_CONTEXT_REDIRECT, final_redirect) | ||||
|  | ||||
|         if not flow: | ||||
|             # We only check for the flow token here if we don't have a flow, otherwise we rely on | ||||
|             # SESSION_KEY_SOURCE_FLOW_STAGES to delegate the usage of this token and dynamically add | ||||
|             # stages that deal with this token to return to another flow | ||||
|             if SESSION_KEY_OVERRIDE_FLOW_TOKEN in self.request.session: | ||||
|                 token: FlowToken = self.request.session.get(SESSION_KEY_OVERRIDE_FLOW_TOKEN) | ||||
|                 self._logger.info( | ||||
|                     "Replacing source flow with overridden flow", flow=token.flow.slug | ||||
|                 ) | ||||
|                 plan = token.plan | ||||
|                 plan.context[PLAN_CONTEXT_IS_RESTORED] = token | ||||
|                 plan.context.update(flow_context) | ||||
|                 for stage in self.get_stages_to_append(flow): | ||||
|                     plan.append_stage(stage) | ||||
|                 if stages: | ||||
|                     for stage in stages: | ||||
|                         plan.append_stage(stage) | ||||
|                 redirect = plan.to_redirect(self.request, token.flow) | ||||
|                 token.delete() | ||||
|                 return redirect | ||||
|             return bad_request_message( | ||||
|                 self.request, | ||||
|                 _("Configured flow does not exist."), | ||||
| @ -259,8 +259,6 @@ class SourceFlowManager: | ||||
|         if stages: | ||||
|             for stage in stages: | ||||
|                 plan.append_stage(stage) | ||||
|         for stage in self.request.session.get(SESSION_KEY_SOURCE_FLOW_STAGES, []): | ||||
|             plan.append_stage(stage) | ||||
|         return plan.to_redirect(self.request, flow) | ||||
|  | ||||
|     def handle_auth( | ||||
| @ -297,8 +295,6 @@ class SourceFlowManager: | ||||
|         # When request isn't authenticated we jump straight to auth | ||||
|         if not self.request.user.is_authenticated: | ||||
|             return self.handle_auth(connection) | ||||
|         # When an override flow token exists we actually still use a flow for link | ||||
|         # to continue the existing flow we came from | ||||
|         if SESSION_KEY_OVERRIDE_FLOW_TOKEN in self.request.session: | ||||
|             return self._prepare_flow(None, connection) | ||||
|         connection.save() | ||||
|  | ||||
| @ -67,8 +67,6 @@ def clean_expired_models(self: SystemTask): | ||||
|                 raise ImproperlyConfigured( | ||||
|                     "Invalid session_storage setting, allowed values are db and cache" | ||||
|                 ) | ||||
|     if CONFIG.get("session_storage", "cache") == "db": | ||||
|         DBSessionStore.clear_expired() | ||||
|     LOGGER.debug("Expired sessions", model=AuthenticatedSession, amount=amount) | ||||
|  | ||||
|     messages.append(f"Expired {amount} {AuthenticatedSession._meta.verbose_name_plural}") | ||||
|  | ||||
| @ -8,8 +8,6 @@ | ||||
|     <head> | ||||
|         <meta charset="UTF-8"> | ||||
|         <meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1"> | ||||
|         {# Darkreader breaks the site regardless of theme as its not compatible with webcomponents, and we default to a dark theme based on preferred colour-scheme #} | ||||
|         <meta name="darkreader-lock"> | ||||
|         <title>{% block title %}{% trans title|default:brand.branding_title %}{% endblock %}</title> | ||||
|         <link rel="icon" href="{{ brand.branding_favicon_url }}"> | ||||
|         <link rel="shortcut icon" href="{{ brand.branding_favicon_url }}"> | ||||
|  | ||||
| @ -4,7 +4,7 @@ from django.urls.base import reverse | ||||
| from guardian.shortcuts import assign_perm | ||||
| from rest_framework.test import APITestCase | ||||
|  | ||||
| from authentik.core.models import Group | ||||
| from authentik.core.models import Group, User | ||||
| from authentik.core.tests.utils import create_test_admin_user, create_test_user | ||||
| from authentik.lib.generators import generate_id | ||||
|  | ||||
| @ -14,7 +14,7 @@ class TestGroupsAPI(APITestCase): | ||||
|  | ||||
|     def setUp(self) -> None: | ||||
|         self.login_user = create_test_user() | ||||
|         self.user = create_test_user() | ||||
|         self.user = User.objects.create(username="test-user") | ||||
|  | ||||
|     def test_list_with_users(self): | ||||
|         """Test listing with users""" | ||||
| @ -109,57 +109,3 @@ class TestGroupsAPI(APITestCase): | ||||
|             }, | ||||
|         ) | ||||
|         self.assertEqual(res.status_code, 400) | ||||
|  | ||||
|     def test_superuser_no_perm(self): | ||||
|         """Test creating a superuser group without permission""" | ||||
|         assign_perm("authentik_core.add_group", self.login_user) | ||||
|         self.client.force_login(self.login_user) | ||||
|         res = self.client.post( | ||||
|             reverse("authentik_api:group-list"), | ||||
|             data={"name": generate_id(), "is_superuser": True}, | ||||
|         ) | ||||
|         self.assertEqual(res.status_code, 400) | ||||
|         self.assertJSONEqual( | ||||
|             res.content, | ||||
|             {"is_superuser": ["User does not have permission to set superuser status to True."]}, | ||||
|         ) | ||||
|  | ||||
|     def test_superuser_update_no_perm(self): | ||||
|         """Test updating a superuser group without permission""" | ||||
|         group = Group.objects.create(name=generate_id(), is_superuser=True) | ||||
|         assign_perm("view_group", self.login_user, group) | ||||
|         assign_perm("change_group", self.login_user, group) | ||||
|         self.client.force_login(self.login_user) | ||||
|         res = self.client.patch( | ||||
|             reverse("authentik_api:group-detail", kwargs={"pk": group.pk}), | ||||
|             data={"is_superuser": False}, | ||||
|         ) | ||||
|         self.assertEqual(res.status_code, 400) | ||||
|         self.assertJSONEqual( | ||||
|             res.content, | ||||
|             {"is_superuser": ["User does not have permission to set superuser status to False."]}, | ||||
|         ) | ||||
|  | ||||
|     def test_superuser_update_no_change(self): | ||||
|         """Test updating a superuser group without permission | ||||
|         and without changing the superuser status""" | ||||
|         group = Group.objects.create(name=generate_id(), is_superuser=True) | ||||
|         assign_perm("view_group", self.login_user, group) | ||||
|         assign_perm("change_group", self.login_user, group) | ||||
|         self.client.force_login(self.login_user) | ||||
|         res = self.client.patch( | ||||
|             reverse("authentik_api:group-detail", kwargs={"pk": group.pk}), | ||||
|             data={"name": generate_id(), "is_superuser": True}, | ||||
|         ) | ||||
|         self.assertEqual(res.status_code, 200) | ||||
|  | ||||
|     def test_superuser_create(self): | ||||
|         """Test creating a superuser group with permission""" | ||||
|         assign_perm("authentik_core.add_group", self.login_user) | ||||
|         assign_perm("authentik_core.enable_group_superuser", self.login_user) | ||||
|         self.client.force_login(self.login_user) | ||||
|         res = self.client.post( | ||||
|             reverse("authentik_api:group-list"), | ||||
|             data={"name": generate_id(), "is_superuser": True}, | ||||
|         ) | ||||
|         self.assertEqual(res.status_code, 201) | ||||
|  | ||||
| @ -97,8 +97,6 @@ class EnterpriseAuditMiddleware(AuditMiddleware): | ||||
|         thread_kwargs: dict | None = None, | ||||
|         **_, | ||||
|     ): | ||||
|         if not self.enabled: | ||||
|             return super().post_save_handler(request, sender, instance, created, thread_kwargs, **_) | ||||
|         if not should_log_model(instance): | ||||
|             return None | ||||
|         thread_kwargs = {} | ||||
| @ -124,8 +122,6 @@ class EnterpriseAuditMiddleware(AuditMiddleware): | ||||
|     ): | ||||
|         thread_kwargs = {} | ||||
|         m2m_field = None | ||||
|         if not self.enabled: | ||||
|             return super().m2m_changed_handler(request, sender, instance, action, thread_kwargs) | ||||
|         # For the audit log we don't care about `pre_` or `post_` so we trim that part off | ||||
|         _, _, action_direction = action.partition("_") | ||||
|         # resolve the "through" model to an actual field | ||||
|  | ||||
| @ -6,12 +6,13 @@ from rest_framework.viewsets import GenericViewSet | ||||
| from authentik.core.api.groups import GroupMemberSerializer | ||||
| from authentik.core.api.used_by import UsedByMixin | ||||
| from authentik.core.api.utils import ModelSerializer | ||||
| from authentik.providers.rac.api.endpoints import EndpointSerializer | ||||
| from authentik.providers.rac.api.providers import RACProviderSerializer | ||||
| from authentik.providers.rac.models import ConnectionToken | ||||
| from authentik.enterprise.api import EnterpriseRequiredMixin | ||||
| from authentik.enterprise.providers.rac.api.endpoints import EndpointSerializer | ||||
| from authentik.enterprise.providers.rac.api.providers import RACProviderSerializer | ||||
| from authentik.enterprise.providers.rac.models import ConnectionToken | ||||
| 
 | ||||
| 
 | ||||
| class ConnectionTokenSerializer(ModelSerializer): | ||||
| class ConnectionTokenSerializer(EnterpriseRequiredMixin, ModelSerializer): | ||||
|     """ConnectionToken Serializer""" | ||||
| 
 | ||||
|     provider_obj = RACProviderSerializer(source="provider", read_only=True) | ||||
| @ -14,9 +14,10 @@ from structlog.stdlib import get_logger | ||||
| from authentik.core.api.used_by import UsedByMixin | ||||
| from authentik.core.api.utils import ModelSerializer | ||||
| from authentik.core.models import Provider | ||||
| from authentik.enterprise.api import EnterpriseRequiredMixin | ||||
| from authentik.enterprise.providers.rac.api.providers import RACProviderSerializer | ||||
| from authentik.enterprise.providers.rac.models import Endpoint | ||||
| from authentik.policies.engine import PolicyEngine | ||||
| from authentik.providers.rac.api.providers import RACProviderSerializer | ||||
| from authentik.providers.rac.models import Endpoint | ||||
| from authentik.rbac.filters import ObjectFilter | ||||
| 
 | ||||
| LOGGER = get_logger() | ||||
| @ -27,7 +28,7 @@ def user_endpoint_cache_key(user_pk: str) -> str: | ||||
|     return f"goauthentik.io/providers/rac/endpoint_access/{user_pk}" | ||||
| 
 | ||||
| 
 | ||||
| class EndpointSerializer(ModelSerializer): | ||||
| class EndpointSerializer(EnterpriseRequiredMixin, ModelSerializer): | ||||
|     """Endpoint Serializer""" | ||||
| 
 | ||||
|     provider_obj = RACProviderSerializer(source="provider", read_only=True) | ||||
| @ -10,7 +10,7 @@ from rest_framework.viewsets import ModelViewSet | ||||
| from authentik.core.api.property_mappings import PropertyMappingSerializer | ||||
| from authentik.core.api.used_by import UsedByMixin | ||||
| from authentik.core.api.utils import JSONDictField | ||||
| from authentik.providers.rac.models import RACPropertyMapping | ||||
| from authentik.enterprise.providers.rac.models import RACPropertyMapping | ||||
| 
 | ||||
| 
 | ||||
| class RACPropertyMappingSerializer(PropertyMappingSerializer): | ||||
| @ -5,10 +5,11 @@ from rest_framework.viewsets import ModelViewSet | ||||
| 
 | ||||
| from authentik.core.api.providers import ProviderSerializer | ||||
| from authentik.core.api.used_by import UsedByMixin | ||||
| from authentik.providers.rac.models import RACProvider | ||||
| from authentik.enterprise.api import EnterpriseRequiredMixin | ||||
| from authentik.enterprise.providers.rac.models import RACProvider | ||||
| 
 | ||||
| 
 | ||||
| class RACProviderSerializer(ProviderSerializer): | ||||
| class RACProviderSerializer(EnterpriseRequiredMixin, ProviderSerializer): | ||||
|     """RACProvider Serializer""" | ||||
| 
 | ||||
|     outpost_set = ListField(child=CharField(), read_only=True, source="outpost_set.all") | ||||
							
								
								
									
										14
									
								
								authentik/enterprise/providers/rac/apps.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										14
									
								
								authentik/enterprise/providers/rac/apps.py
									
									
									
									
									
										Normal file
									
								
							| @ -0,0 +1,14 @@ | ||||
| """RAC app config""" | ||||
|  | ||||
| from authentik.enterprise.apps import EnterpriseConfig | ||||
|  | ||||
|  | ||||
| class AuthentikEnterpriseProviderRAC(EnterpriseConfig): | ||||
|     """authentik enterprise rac app config""" | ||||
|  | ||||
|     name = "authentik.enterprise.providers.rac" | ||||
|     label = "authentik_providers_rac" | ||||
|     verbose_name = "authentik Enterprise.Providers.RAC" | ||||
|     default = True | ||||
|     mountpoint = "" | ||||
|     ws_mountpoint = "authentik.enterprise.providers.rac.urls" | ||||
| @ -7,22 +7,22 @@ from channels.generic.websocket import AsyncWebsocketConsumer | ||||
| from django.http.request import QueryDict | ||||
| from structlog.stdlib import BoundLogger, get_logger | ||||
| 
 | ||||
| from authentik.enterprise.providers.rac.models import ConnectionToken, RACProvider | ||||
| from authentik.outposts.consumer import OUTPOST_GROUP_INSTANCE | ||||
| from authentik.outposts.models import Outpost, OutpostState, OutpostType | ||||
| from authentik.providers.rac.models import ConnectionToken, RACProvider | ||||
| 
 | ||||
| # Global broadcast group, which messages are sent to when the outpost connects back | ||||
| # to authentik for a specific connection | ||||
| # The `RACClientConsumer` consumer adds itself to this group on connection, | ||||
| # and removes itself once it has been assigned a specific outpost channel | ||||
| RAC_CLIENT_GROUP = "group_rac_client" | ||||
| RAC_CLIENT_GROUP = "group_enterprise_rac_client" | ||||
| # A group for all connections in a given authentik session ID | ||||
| # A disconnect message is sent to this group when the session expires/is deleted | ||||
| RAC_CLIENT_GROUP_SESSION = "group_rac_client_%(session)s" | ||||
| RAC_CLIENT_GROUP_SESSION = "group_enterprise_rac_client_%(session)s" | ||||
| # A group for all connections with a specific token, which in almost all cases | ||||
| # is just one connection, however this is used to disconnect the connection | ||||
| # when the token is deleted | ||||
| RAC_CLIENT_GROUP_TOKEN = "group_rac_token_%(token)s"  # nosec | ||||
| RAC_CLIENT_GROUP_TOKEN = "group_enterprise_rac_token_%(token)s"  # nosec | ||||
| 
 | ||||
| # Step 1: Client connects to this websocket endpoint | ||||
| # Step 2: We prepare all the connection args for Guac | ||||
| @ -3,7 +3,7 @@ | ||||
| from channels.exceptions import ChannelFull | ||||
| from channels.generic.websocket import AsyncWebsocketConsumer | ||||
| 
 | ||||
| from authentik.providers.rac.consumer_client import RAC_CLIENT_GROUP | ||||
| from authentik.enterprise.providers.rac.consumer_client import RAC_CLIENT_GROUP | ||||
| 
 | ||||
| 
 | ||||
| class RACOutpostConsumer(AsyncWebsocketConsumer): | ||||
| @ -74,7 +74,7 @@ class RACProvider(Provider): | ||||
| 
 | ||||
|     @property | ||||
|     def serializer(self) -> type[Serializer]: | ||||
|         from authentik.providers.rac.api.providers import RACProviderSerializer | ||||
|         from authentik.enterprise.providers.rac.api.providers import RACProviderSerializer | ||||
| 
 | ||||
|         return RACProviderSerializer | ||||
| 
 | ||||
| @ -100,7 +100,7 @@ class Endpoint(SerializerModel, PolicyBindingModel): | ||||
| 
 | ||||
|     @property | ||||
|     def serializer(self) -> type[Serializer]: | ||||
|         from authentik.providers.rac.api.endpoints import EndpointSerializer | ||||
|         from authentik.enterprise.providers.rac.api.endpoints import EndpointSerializer | ||||
| 
 | ||||
|         return EndpointSerializer | ||||
| 
 | ||||
| @ -129,7 +129,7 @@ class RACPropertyMapping(PropertyMapping): | ||||
| 
 | ||||
|     @property | ||||
|     def serializer(self) -> type[Serializer]: | ||||
|         from authentik.providers.rac.api.property_mappings import ( | ||||
|         from authentik.enterprise.providers.rac.api.property_mappings import ( | ||||
|             RACPropertyMappingSerializer, | ||||
|         ) | ||||
| 
 | ||||
| @ -10,12 +10,12 @@ from django.dispatch import receiver | ||||
| from django.http import HttpRequest | ||||
| 
 | ||||
| from authentik.core.models import User | ||||
| from authentik.providers.rac.api.endpoints import user_endpoint_cache_key | ||||
| from authentik.providers.rac.consumer_client import ( | ||||
| from authentik.enterprise.providers.rac.api.endpoints import user_endpoint_cache_key | ||||
| from authentik.enterprise.providers.rac.consumer_client import ( | ||||
|     RAC_CLIENT_GROUP_SESSION, | ||||
|     RAC_CLIENT_GROUP_TOKEN, | ||||
| ) | ||||
| from authentik.providers.rac.models import ConnectionToken, Endpoint | ||||
| from authentik.enterprise.providers.rac.models import ConnectionToken, Endpoint | ||||
| 
 | ||||
| 
 | ||||
| @receiver(user_logged_out) | ||||
| @ -3,7 +3,7 @@ | ||||
| {% load authentik_core %} | ||||
| 
 | ||||
| {% block head %} | ||||
| <script src="{% versioned_script 'dist/rac/index-%v.js' %}" type="module"></script> | ||||
| <script src="{% versioned_script 'dist/enterprise/rac/index-%v.js' %}" type="module"></script> | ||||
| <meta name="theme-color" content="#18191a" media="(prefers-color-scheme: dark)"> | ||||
| <meta name="theme-color" content="#ffffff" media="(prefers-color-scheme: light)"> | ||||
| <link rel="icon" href="{{ tenant.branding_favicon_url }}"> | ||||
| @ -1,9 +1,16 @@ | ||||
| """Test RAC Provider""" | ||||
| 
 | ||||
| from datetime import timedelta | ||||
| from time import mktime | ||||
| from unittest.mock import MagicMock, patch | ||||
| 
 | ||||
| from django.urls import reverse | ||||
| from django.utils.timezone import now | ||||
| from rest_framework.test import APITestCase | ||||
| 
 | ||||
| from authentik.core.tests.utils import create_test_admin_user, create_test_flow | ||||
| from authentik.enterprise.license import LicenseKey | ||||
| from authentik.enterprise.models import License | ||||
| from authentik.lib.generators import generate_id | ||||
| 
 | ||||
| 
 | ||||
| @ -13,8 +20,21 @@ class TestAPI(APITestCase): | ||||
|     def setUp(self) -> None: | ||||
|         self.user = create_test_admin_user() | ||||
| 
 | ||||
|     @patch( | ||||
|         "authentik.enterprise.license.LicenseKey.validate", | ||||
|         MagicMock( | ||||
|             return_value=LicenseKey( | ||||
|                 aud="", | ||||
|                 exp=int(mktime((now() + timedelta(days=3000)).timetuple())), | ||||
|                 name=generate_id(), | ||||
|                 internal_users=100, | ||||
|                 external_users=100, | ||||
|             ) | ||||
|         ), | ||||
|     ) | ||||
|     def test_create(self): | ||||
|         """Test creation of RAC Provider""" | ||||
|         License.objects.create(key=generate_id()) | ||||
|         self.client.force_login(self.user) | ||||
|         response = self.client.post( | ||||
|             reverse("authentik_api:racprovider-list"), | ||||
| @ -5,10 +5,10 @@ from rest_framework.test import APITestCase | ||||
| 
 | ||||
| from authentik.core.models import Application | ||||
| from authentik.core.tests.utils import create_test_admin_user | ||||
| from authentik.enterprise.providers.rac.models import Endpoint, Protocols, RACProvider | ||||
| from authentik.lib.generators import generate_id | ||||
| from authentik.policies.dummy.models import DummyPolicy | ||||
| from authentik.policies.models import PolicyBinding | ||||
| from authentik.providers.rac.models import Endpoint, Protocols, RACProvider | ||||
| 
 | ||||
| 
 | ||||
| class TestEndpointsAPI(APITestCase): | ||||
| @ -4,14 +4,14 @@ from django.test import TransactionTestCase | ||||
| 
 | ||||
| from authentik.core.models import Application, AuthenticatedSession | ||||
| from authentik.core.tests.utils import create_test_admin_user | ||||
| from authentik.lib.generators import generate_id | ||||
| from authentik.providers.rac.models import ( | ||||
| from authentik.enterprise.providers.rac.models import ( | ||||
|     ConnectionToken, | ||||
|     Endpoint, | ||||
|     Protocols, | ||||
|     RACPropertyMapping, | ||||
|     RACProvider, | ||||
| ) | ||||
| from authentik.lib.generators import generate_id | ||||
| 
 | ||||
| 
 | ||||
| class TestModels(TransactionTestCase): | ||||
| @ -1,17 +1,23 @@ | ||||
| """RAC Views tests""" | ||||
| 
 | ||||
| from datetime import timedelta | ||||
| from json import loads | ||||
| from time import mktime | ||||
| from unittest.mock import MagicMock, patch | ||||
| 
 | ||||
| from django.urls import reverse | ||||
| from django.utils.timezone import now | ||||
| from rest_framework.test import APITestCase | ||||
| 
 | ||||
| from authentik.core.models import Application | ||||
| from authentik.core.tests.utils import create_test_admin_user, create_test_flow | ||||
| from authentik.enterprise.license import LicenseKey | ||||
| from authentik.enterprise.models import License | ||||
| from authentik.enterprise.providers.rac.models import Endpoint, Protocols, RACProvider | ||||
| from authentik.lib.generators import generate_id | ||||
| from authentik.policies.denied import AccessDeniedResponse | ||||
| from authentik.policies.dummy.models import DummyPolicy | ||||
| from authentik.policies.models import PolicyBinding | ||||
| from authentik.providers.rac.models import Endpoint, Protocols, RACProvider | ||||
| 
 | ||||
| 
 | ||||
| class TestRACViews(APITestCase): | ||||
| @ -33,8 +39,21 @@ class TestRACViews(APITestCase): | ||||
|             provider=self.provider, | ||||
|         ) | ||||
| 
 | ||||
|     @patch( | ||||
|         "authentik.enterprise.license.LicenseKey.validate", | ||||
|         MagicMock( | ||||
|             return_value=LicenseKey( | ||||
|                 aud="", | ||||
|                 exp=int(mktime((now() + timedelta(days=3000)).timetuple())), | ||||
|                 name=generate_id(), | ||||
|                 internal_users=100, | ||||
|                 external_users=100, | ||||
|             ) | ||||
|         ), | ||||
|     ) | ||||
|     def test_no_policy(self): | ||||
|         """Test request""" | ||||
|         License.objects.create(key=generate_id()) | ||||
|         self.client.force_login(self.user) | ||||
|         response = self.client.get( | ||||
|             reverse( | ||||
| @ -51,6 +70,18 @@ class TestRACViews(APITestCase): | ||||
|         final_response = self.client.get(next_url) | ||||
|         self.assertEqual(final_response.status_code, 200) | ||||
| 
 | ||||
|     @patch( | ||||
|         "authentik.enterprise.license.LicenseKey.validate", | ||||
|         MagicMock( | ||||
|             return_value=LicenseKey( | ||||
|                 aud="", | ||||
|                 exp=int(mktime((now() + timedelta(days=3000)).timetuple())), | ||||
|                 name=generate_id(), | ||||
|                 internal_users=100, | ||||
|                 external_users=100, | ||||
|             ) | ||||
|         ), | ||||
|     ) | ||||
|     def test_app_deny(self): | ||||
|         """Test request (deny on app level)""" | ||||
|         PolicyBinding.objects.create( | ||||
| @ -58,6 +89,7 @@ class TestRACViews(APITestCase): | ||||
|             policy=DummyPolicy.objects.create(name="deny", result=False, wait_min=1, wait_max=2), | ||||
|             order=0, | ||||
|         ) | ||||
|         License.objects.create(key=generate_id()) | ||||
|         self.client.force_login(self.user) | ||||
|         response = self.client.get( | ||||
|             reverse( | ||||
| @ -67,6 +99,18 @@ class TestRACViews(APITestCase): | ||||
|         ) | ||||
|         self.assertIsInstance(response, AccessDeniedResponse) | ||||
| 
 | ||||
|     @patch( | ||||
|         "authentik.enterprise.license.LicenseKey.validate", | ||||
|         MagicMock( | ||||
|             return_value=LicenseKey( | ||||
|                 aud="", | ||||
|                 exp=int(mktime((now() + timedelta(days=3000)).timetuple())), | ||||
|                 name=generate_id(), | ||||
|                 internal_users=100, | ||||
|                 external_users=100, | ||||
|             ) | ||||
|         ), | ||||
|     ) | ||||
|     def test_endpoint_deny(self): | ||||
|         """Test request (deny on endpoint level)""" | ||||
|         PolicyBinding.objects.create( | ||||
| @ -74,6 +118,7 @@ class TestRACViews(APITestCase): | ||||
|             policy=DummyPolicy.objects.create(name="deny", result=False, wait_min=1, wait_max=2), | ||||
|             order=0, | ||||
|         ) | ||||
|         License.objects.create(key=generate_id()) | ||||
|         self.client.force_login(self.user) | ||||
|         response = self.client.get( | ||||
|             reverse( | ||||
| @ -4,14 +4,14 @@ from channels.auth import AuthMiddleware | ||||
| from channels.sessions import CookieMiddleware | ||||
| from django.urls import path | ||||
| 
 | ||||
| from authentik.enterprise.providers.rac.api.connection_tokens import ConnectionTokenViewSet | ||||
| from authentik.enterprise.providers.rac.api.endpoints import EndpointViewSet | ||||
| from authentik.enterprise.providers.rac.api.property_mappings import RACPropertyMappingViewSet | ||||
| from authentik.enterprise.providers.rac.api.providers import RACProviderViewSet | ||||
| from authentik.enterprise.providers.rac.consumer_client import RACClientConsumer | ||||
| from authentik.enterprise.providers.rac.consumer_outpost import RACOutpostConsumer | ||||
| from authentik.enterprise.providers.rac.views import RACInterface, RACStartView | ||||
| from authentik.outposts.channels import TokenOutpostMiddleware | ||||
| from authentik.providers.rac.api.connection_tokens import ConnectionTokenViewSet | ||||
| from authentik.providers.rac.api.endpoints import EndpointViewSet | ||||
| from authentik.providers.rac.api.property_mappings import RACPropertyMappingViewSet | ||||
| from authentik.providers.rac.api.providers import RACProviderViewSet | ||||
| from authentik.providers.rac.consumer_client import RACClientConsumer | ||||
| from authentik.providers.rac.consumer_outpost import RACOutpostConsumer | ||||
| from authentik.providers.rac.views import RACInterface, RACStartView | ||||
| from authentik.root.asgi_middleware import SessionMiddleware | ||||
| from authentik.root.middleware import ChannelsLoggingMiddleware | ||||
| 
 | ||||
| @ -10,6 +10,8 @@ from django.utils.translation import gettext as _ | ||||
| 
 | ||||
| from authentik.core.models import Application, AuthenticatedSession | ||||
| from authentik.core.views.interface import InterfaceView | ||||
| from authentik.enterprise.policy import EnterprisePolicyAccessView | ||||
| from authentik.enterprise.providers.rac.models import ConnectionToken, Endpoint, RACProvider | ||||
| from authentik.events.models import Event, EventAction | ||||
| from authentik.flows.challenge import RedirectChallenge | ||||
| from authentik.flows.exceptions import FlowNonApplicableException | ||||
| @ -18,11 +20,9 @@ from authentik.flows.planner import PLAN_CONTEXT_APPLICATION, FlowPlanner | ||||
| from authentik.flows.stage import RedirectStage | ||||
| from authentik.lib.utils.time import timedelta_from_string | ||||
| from authentik.policies.engine import PolicyEngine | ||||
| from authentik.policies.views import PolicyAccessView | ||||
| from authentik.providers.rac.models import ConnectionToken, Endpoint, RACProvider | ||||
| 
 | ||||
| 
 | ||||
| class RACStartView(PolicyAccessView): | ||||
| class RACStartView(EnterprisePolicyAccessView): | ||||
|     """Start a RAC connection by checking access and creating a connection token""" | ||||
| 
 | ||||
|     endpoint: Endpoint | ||||
| @ -1,64 +0,0 @@ | ||||
| """SSF Provider API Views""" | ||||
|  | ||||
| from django.urls import reverse | ||||
| from rest_framework.fields import SerializerMethodField | ||||
| from rest_framework.request import Request | ||||
| from rest_framework.viewsets import ModelViewSet | ||||
|  | ||||
| from authentik.core.api.providers import ProviderSerializer | ||||
| from authentik.core.api.tokens import TokenSerializer | ||||
| from authentik.core.api.used_by import UsedByMixin | ||||
| from authentik.enterprise.api import EnterpriseRequiredMixin | ||||
| from authentik.enterprise.providers.ssf.models import SSFProvider | ||||
|  | ||||
|  | ||||
| class SSFProviderSerializer(EnterpriseRequiredMixin, ProviderSerializer): | ||||
|     """SSFProvider Serializer""" | ||||
|  | ||||
|     ssf_url = SerializerMethodField() | ||||
|     token_obj = TokenSerializer(source="token", required=False, read_only=True) | ||||
|  | ||||
|     def get_ssf_url(self, instance: SSFProvider) -> str | None: | ||||
|         request: Request = self._context.get("request") | ||||
|         if not request: | ||||
|             return None | ||||
|         if not instance.backchannel_application: | ||||
|             return None | ||||
|         return request.build_absolute_uri( | ||||
|             reverse( | ||||
|                 "authentik_providers_ssf:configuration", | ||||
|                 kwargs={ | ||||
|                     "application_slug": instance.backchannel_application.slug, | ||||
|                 }, | ||||
|             ) | ||||
|         ) | ||||
|  | ||||
|     class Meta: | ||||
|         model = SSFProvider | ||||
|         fields = [ | ||||
|             "pk", | ||||
|             "name", | ||||
|             "component", | ||||
|             "verbose_name", | ||||
|             "verbose_name_plural", | ||||
|             "meta_model_name", | ||||
|             "signing_key", | ||||
|             "token_obj", | ||||
|             "oidc_auth_providers", | ||||
|             "ssf_url", | ||||
|             "event_retention", | ||||
|         ] | ||||
|         extra_kwargs = {} | ||||
|  | ||||
|  | ||||
| class SSFProviderViewSet(UsedByMixin, ModelViewSet): | ||||
|     """SSFProvider Viewset""" | ||||
|  | ||||
|     queryset = SSFProvider.objects.all() | ||||
|     serializer_class = SSFProviderSerializer | ||||
|     filterset_fields = { | ||||
|         "application": ["isnull"], | ||||
|         "name": ["iexact"], | ||||
|     } | ||||
|     search_fields = ["name"] | ||||
|     ordering = ["name"] | ||||
| @ -1,37 +0,0 @@ | ||||
| """SSF Stream API Views""" | ||||
|  | ||||
| from rest_framework.viewsets import ReadOnlyModelViewSet | ||||
|  | ||||
| from authentik.core.api.utils import ModelSerializer | ||||
| from authentik.enterprise.providers.ssf.api.providers import SSFProviderSerializer | ||||
| from authentik.enterprise.providers.ssf.models import Stream | ||||
|  | ||||
|  | ||||
| class SSFStreamSerializer(ModelSerializer): | ||||
|     """SSFStream Serializer""" | ||||
|  | ||||
|     provider_obj = SSFProviderSerializer(source="provider", read_only=True) | ||||
|  | ||||
|     class Meta: | ||||
|         model = Stream | ||||
|         fields = [ | ||||
|             "pk", | ||||
|             "provider", | ||||
|             "provider_obj", | ||||
|             "delivery_method", | ||||
|             "endpoint_url", | ||||
|             "events_requested", | ||||
|             "format", | ||||
|             "aud", | ||||
|             "iss", | ||||
|         ] | ||||
|  | ||||
|  | ||||
| class SSFStreamViewSet(ReadOnlyModelViewSet): | ||||
|     """SSFStream Viewset""" | ||||
|  | ||||
|     queryset = Stream.objects.all() | ||||
|     serializer_class = SSFStreamSerializer | ||||
|     filterset_fields = ["provider", "endpoint_url", "delivery_method"] | ||||
|     search_fields = ["provider__name", "endpoint_url"] | ||||
|     ordering = ["provider", "uuid"] | ||||
| @ -1,13 +0,0 @@ | ||||
| """SSF app config""" | ||||
|  | ||||
| from authentik.enterprise.apps import EnterpriseConfig | ||||
|  | ||||
|  | ||||
| class AuthentikEnterpriseProviderSSF(EnterpriseConfig): | ||||
|     """authentik enterprise ssf app config""" | ||||
|  | ||||
|     name = "authentik.enterprise.providers.ssf" | ||||
|     label = "authentik_providers_ssf" | ||||
|     verbose_name = "authentik Enterprise.Providers.SSF" | ||||
|     default = True | ||||
|     mountpoint = "" | ||||
| @ -1,201 +0,0 @@ | ||||
| # Generated by Django 5.0.11 on 2025-02-05 16:20 | ||||
|  | ||||
| import authentik.lib.utils.time | ||||
| import django.contrib.postgres.fields | ||||
| import django.db.models.deletion | ||||
| import uuid | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     initial = True | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("authentik_core", "0042_authenticatedsession_authentik_c_expires_08251d_idx_and_more"), | ||||
|         ("authentik_crypto", "0004_alter_certificatekeypair_name"), | ||||
|         ("authentik_providers_oauth2", "0027_accesstoken_authentik_p_expires_9f24a5_idx_and_more"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.CreateModel( | ||||
|             name="SSFProvider", | ||||
|             fields=[ | ||||
|                 ( | ||||
|                     "provider_ptr", | ||||
|                     models.OneToOneField( | ||||
|                         auto_created=True, | ||||
|                         on_delete=django.db.models.deletion.CASCADE, | ||||
|                         parent_link=True, | ||||
|                         primary_key=True, | ||||
|                         serialize=False, | ||||
|                         to="authentik_core.provider", | ||||
|                     ), | ||||
|                 ), | ||||
|                 ( | ||||
|                     "event_retention", | ||||
|                     models.TextField( | ||||
|                         default="days=30", | ||||
|                         validators=[authentik.lib.utils.time.timedelta_string_validator], | ||||
|                     ), | ||||
|                 ), | ||||
|                 ( | ||||
|                     "oidc_auth_providers", | ||||
|                     models.ManyToManyField( | ||||
|                         blank=True, default=None, to="authentik_providers_oauth2.oauth2provider" | ||||
|                     ), | ||||
|                 ), | ||||
|                 ( | ||||
|                     "signing_key", | ||||
|                     models.ForeignKey( | ||||
|                         help_text="Key used to sign the SSF Events.", | ||||
|                         on_delete=django.db.models.deletion.CASCADE, | ||||
|                         to="authentik_crypto.certificatekeypair", | ||||
|                         verbose_name="Signing Key", | ||||
|                     ), | ||||
|                 ), | ||||
|                 ( | ||||
|                     "token", | ||||
|                     models.ForeignKey( | ||||
|                         default=None, | ||||
|                         null=True, | ||||
|                         on_delete=django.db.models.deletion.CASCADE, | ||||
|                         to="authentik_core.token", | ||||
|                     ), | ||||
|                 ), | ||||
|             ], | ||||
|             options={ | ||||
|                 "verbose_name": "Shared Signals Framework Provider", | ||||
|                 "verbose_name_plural": "Shared Signals Framework Providers", | ||||
|                 "permissions": [("add_stream", "Add stream to SSF provider")], | ||||
|             }, | ||||
|             bases=("authentik_core.provider",), | ||||
|         ), | ||||
|         migrations.CreateModel( | ||||
|             name="Stream", | ||||
|             fields=[ | ||||
|                 ( | ||||
|                     "uuid", | ||||
|                     models.UUIDField( | ||||
|                         default=uuid.uuid4, editable=False, primary_key=True, serialize=False | ||||
|                     ), | ||||
|                 ), | ||||
|                 ( | ||||
|                     "delivery_method", | ||||
|                     models.TextField( | ||||
|                         choices=[ | ||||
|                             ( | ||||
|                                 "https://schemas.openid.net/secevent/risc/delivery-method/push", | ||||
|                                 "Risc Push", | ||||
|                             ), | ||||
|                             ( | ||||
|                                 "https://schemas.openid.net/secevent/risc/delivery-method/poll", | ||||
|                                 "Risc Poll", | ||||
|                             ), | ||||
|                         ] | ||||
|                     ), | ||||
|                 ), | ||||
|                 ("endpoint_url", models.TextField(null=True)), | ||||
|                 ( | ||||
|                     "events_requested", | ||||
|                     django.contrib.postgres.fields.ArrayField( | ||||
|                         base_field=models.TextField( | ||||
|                             choices=[ | ||||
|                                 ( | ||||
|                                     "https://schemas.openid.net/secevent/caep/event-type/session-revoked", | ||||
|                                     "Caep Session Revoked", | ||||
|                                 ), | ||||
|                                 ( | ||||
|                                     "https://schemas.openid.net/secevent/caep/event-type/credential-change", | ||||
|                                     "Caep Credential Change", | ||||
|                                 ), | ||||
|                                 ( | ||||
|                                     "https://schemas.openid.net/secevent/ssf/event-type/verification", | ||||
|                                     "Set Verification", | ||||
|                                 ), | ||||
|                             ] | ||||
|                         ), | ||||
|                         default=list, | ||||
|                         size=None, | ||||
|                     ), | ||||
|                 ), | ||||
|                 ("format", models.TextField()), | ||||
|                 ( | ||||
|                     "aud", | ||||
|                     django.contrib.postgres.fields.ArrayField( | ||||
|                         base_field=models.TextField(), default=list, size=None | ||||
|                     ), | ||||
|                 ), | ||||
|                 ("iss", models.TextField()), | ||||
|                 ( | ||||
|                     "provider", | ||||
|                     models.ForeignKey( | ||||
|                         on_delete=django.db.models.deletion.CASCADE, | ||||
|                         to="authentik_providers_ssf.ssfprovider", | ||||
|                     ), | ||||
|                 ), | ||||
|             ], | ||||
|             options={ | ||||
|                 "verbose_name": "SSF Stream", | ||||
|                 "verbose_name_plural": "SSF Streams", | ||||
|                 "default_permissions": ["change", "delete", "view"], | ||||
|             }, | ||||
|         ), | ||||
|         migrations.CreateModel( | ||||
|             name="StreamEvent", | ||||
|             fields=[ | ||||
|                 ("created", models.DateTimeField(auto_now_add=True)), | ||||
|                 ("last_updated", models.DateTimeField(auto_now=True)), | ||||
|                 ("expires", models.DateTimeField(default=None, null=True)), | ||||
|                 ("expiring", models.BooleanField(default=True)), | ||||
|                 ( | ||||
|                     "uuid", | ||||
|                     models.UUIDField( | ||||
|                         default=uuid.uuid4, editable=False, primary_key=True, serialize=False | ||||
|                     ), | ||||
|                 ), | ||||
|                 ( | ||||
|                     "status", | ||||
|                     models.TextField( | ||||
|                         choices=[ | ||||
|                             ("pending_new", "Pending New"), | ||||
|                             ("pending_failed", "Pending Failed"), | ||||
|                             ("sent", "Sent"), | ||||
|                         ] | ||||
|                     ), | ||||
|                 ), | ||||
|                 ( | ||||
|                     "type", | ||||
|                     models.TextField( | ||||
|                         choices=[ | ||||
|                             ( | ||||
|                                 "https://schemas.openid.net/secevent/caep/event-type/session-revoked", | ||||
|                                 "Caep Session Revoked", | ||||
|                             ), | ||||
|                             ( | ||||
|                                 "https://schemas.openid.net/secevent/caep/event-type/credential-change", | ||||
|                                 "Caep Credential Change", | ||||
|                             ), | ||||
|                             ( | ||||
|                                 "https://schemas.openid.net/secevent/ssf/event-type/verification", | ||||
|                                 "Set Verification", | ||||
|                             ), | ||||
|                         ] | ||||
|                     ), | ||||
|                 ), | ||||
|                 ("payload", models.JSONField(default=dict)), | ||||
|                 ( | ||||
|                     "stream", | ||||
|                     models.ForeignKey( | ||||
|                         on_delete=django.db.models.deletion.CASCADE, | ||||
|                         to="authentik_providers_ssf.stream", | ||||
|                     ), | ||||
|                 ), | ||||
|             ], | ||||
|             options={ | ||||
|                 "verbose_name": "SSF Stream Event", | ||||
|                 "verbose_name_plural": "SSF Stream Events", | ||||
|                 "ordering": ("-created",), | ||||
|             }, | ||||
|         ), | ||||
|     ] | ||||
| @ -1,178 +0,0 @@ | ||||
| from datetime import datetime | ||||
| from functools import cached_property | ||||
| from uuid import uuid4 | ||||
|  | ||||
| from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurvePrivateKey | ||||
| from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey | ||||
| from cryptography.hazmat.primitives.asymmetric.types import PrivateKeyTypes | ||||
| from django.contrib.postgres.fields import ArrayField | ||||
| from django.db import models | ||||
| from django.templatetags.static import static | ||||
| from django.utils.timezone import now | ||||
| from django.utils.translation import gettext_lazy as _ | ||||
| from jwt import encode | ||||
|  | ||||
| from authentik.core.models import BackchannelProvider, ExpiringModel, Token | ||||
| from authentik.crypto.models import CertificateKeyPair | ||||
| from authentik.lib.models import CreatedUpdatedModel | ||||
| from authentik.lib.utils.time import timedelta_from_string, timedelta_string_validator | ||||
| from authentik.providers.oauth2.models import JWTAlgorithms, OAuth2Provider | ||||
|  | ||||
|  | ||||
| class EventTypes(models.TextChoices): | ||||
|     """SSF Event types supported by authentik""" | ||||
|  | ||||
|     CAEP_SESSION_REVOKED = "https://schemas.openid.net/secevent/caep/event-type/session-revoked" | ||||
|     CAEP_CREDENTIAL_CHANGE = "https://schemas.openid.net/secevent/caep/event-type/credential-change" | ||||
|     SET_VERIFICATION = "https://schemas.openid.net/secevent/ssf/event-type/verification" | ||||
|  | ||||
|  | ||||
| class DeliveryMethods(models.TextChoices): | ||||
|     """SSF Delivery methods""" | ||||
|  | ||||
|     RISC_PUSH = "https://schemas.openid.net/secevent/risc/delivery-method/push" | ||||
|     RISC_POLL = "https://schemas.openid.net/secevent/risc/delivery-method/poll" | ||||
|  | ||||
|  | ||||
| class SSFEventStatus(models.TextChoices): | ||||
|     """SSF Event status""" | ||||
|  | ||||
|     PENDING_NEW = "pending_new" | ||||
|     PENDING_FAILED = "pending_failed" | ||||
|     SENT = "sent" | ||||
|  | ||||
|  | ||||
| class SSFProvider(BackchannelProvider): | ||||
|     """Shared Signals Framework provider to allow applications to | ||||
|     receive user events from authentik.""" | ||||
|  | ||||
|     signing_key = models.ForeignKey( | ||||
|         CertificateKeyPair, | ||||
|         verbose_name=_("Signing Key"), | ||||
|         on_delete=models.CASCADE, | ||||
|         help_text=_("Key used to sign the SSF Events."), | ||||
|     ) | ||||
|  | ||||
|     oidc_auth_providers = models.ManyToManyField(OAuth2Provider, blank=True, default=None) | ||||
|  | ||||
|     token = models.ForeignKey(Token, on_delete=models.CASCADE, null=True, default=None) | ||||
|  | ||||
|     event_retention = models.TextField( | ||||
|         default="days=30", | ||||
|         validators=[timedelta_string_validator], | ||||
|     ) | ||||
|  | ||||
|     @cached_property | ||||
|     def jwt_key(self) -> tuple[PrivateKeyTypes, str]: | ||||
|         """Get either the configured certificate or the client secret""" | ||||
|         key: CertificateKeyPair = self.signing_key | ||||
|         private_key = key.private_key | ||||
|         if isinstance(private_key, RSAPrivateKey): | ||||
|             return private_key, JWTAlgorithms.RS256 | ||||
|         if isinstance(private_key, EllipticCurvePrivateKey): | ||||
|             return private_key, JWTAlgorithms.ES256 | ||||
|         raise ValueError(f"Invalid private key type: {type(private_key)}") | ||||
|  | ||||
|     @property | ||||
|     def service_account_identifier(self) -> str: | ||||
|         return f"ak-providers-ssf-{self.pk}" | ||||
|  | ||||
|     @property | ||||
|     def serializer(self): | ||||
|         from authentik.enterprise.providers.ssf.api.providers import SSFProviderSerializer | ||||
|  | ||||
|         return SSFProviderSerializer | ||||
|  | ||||
|     @property | ||||
|     def icon_url(self) -> str | None: | ||||
|         return static("authentik/sources/ssf.svg") | ||||
|  | ||||
|     @property | ||||
|     def component(self) -> str: | ||||
|         return "ak-provider-ssf-form" | ||||
|  | ||||
|     class Meta: | ||||
|         verbose_name = _("Shared Signals Framework Provider") | ||||
|         verbose_name_plural = _("Shared Signals Framework Providers") | ||||
|         permissions = [ | ||||
|             # This overrides the default "add_stream" permission of the Stream object, | ||||
|             # as the user requesting to add a stream must have the permission on the provider | ||||
|             ("add_stream", _("Add stream to SSF provider")), | ||||
|         ] | ||||
|  | ||||
|  | ||||
| class Stream(models.Model): | ||||
|     """SSF Stream""" | ||||
|  | ||||
|     uuid = models.UUIDField(default=uuid4, primary_key=True, editable=False) | ||||
|     provider = models.ForeignKey(SSFProvider, on_delete=models.CASCADE) | ||||
|  | ||||
|     delivery_method = models.TextField(choices=DeliveryMethods.choices) | ||||
|     endpoint_url = models.TextField(null=True) | ||||
|  | ||||
|     events_requested = ArrayField(models.TextField(choices=EventTypes.choices), default=list) | ||||
|     format = models.TextField() | ||||
|     aud = ArrayField(models.TextField(), default=list) | ||||
|  | ||||
|     iss = models.TextField() | ||||
|  | ||||
|     class Meta: | ||||
|         verbose_name = _("SSF Stream") | ||||
|         verbose_name_plural = _("SSF Streams") | ||||
|         default_permissions = ["change", "delete", "view"] | ||||
|  | ||||
|     def __str__(self) -> str: | ||||
|         return "SSF Stream" | ||||
|  | ||||
|     def prepare_event_payload(self, type: EventTypes, event_data: dict, **kwargs) -> dict: | ||||
|         jti = uuid4() | ||||
|         _now = now() | ||||
|         return { | ||||
|             "uuid": jti, | ||||
|             "stream_id": str(self.pk), | ||||
|             "type": type, | ||||
|             "expiring": True, | ||||
|             "status": SSFEventStatus.PENDING_NEW, | ||||
|             "expires": _now + timedelta_from_string(self.provider.event_retention), | ||||
|             "payload": { | ||||
|                 "jti": jti.hex, | ||||
|                 "aud": self.aud, | ||||
|                 "iat": int(datetime.now().timestamp()), | ||||
|                 "iss": self.iss, | ||||
|                 "events": {type: event_data}, | ||||
|                 **kwargs, | ||||
|             }, | ||||
|         } | ||||
|  | ||||
|     def encode(self, data: dict) -> str: | ||||
|         headers = {} | ||||
|         if self.provider.signing_key: | ||||
|             headers["kid"] = self.provider.signing_key.kid | ||||
|         key, alg = self.provider.jwt_key | ||||
|         return encode(data, key, algorithm=alg, headers=headers) | ||||
|  | ||||
|  | ||||
| class StreamEvent(CreatedUpdatedModel, ExpiringModel): | ||||
|     """Single stream event to be sent""" | ||||
|  | ||||
|     uuid = models.UUIDField(default=uuid4, primary_key=True, editable=False) | ||||
|  | ||||
|     stream = models.ForeignKey(Stream, on_delete=models.CASCADE) | ||||
|     status = models.TextField(choices=SSFEventStatus.choices) | ||||
|  | ||||
|     type = models.TextField(choices=EventTypes.choices) | ||||
|     payload = models.JSONField(default=dict) | ||||
|  | ||||
|     def expire_action(self, *args, **kwargs): | ||||
|         """Only allow automatic cleanup of successfully sent event""" | ||||
|         if self.status != SSFEventStatus.SENT: | ||||
|             return | ||||
|         return super().expire_action(*args, **kwargs) | ||||
|  | ||||
|     def __str__(self): | ||||
|         return f"Stream event {self.type}" | ||||
|  | ||||
|     class Meta: | ||||
|         verbose_name = _("SSF Stream Event") | ||||
|         verbose_name_plural = _("SSF Stream Events") | ||||
|         ordering = ("-created",) | ||||
| @ -1,193 +0,0 @@ | ||||
| from hashlib import sha256 | ||||
|  | ||||
| from django.contrib.auth.signals import user_logged_out | ||||
| from django.db.models import Model | ||||
| from django.db.models.signals import post_delete, post_save, pre_delete | ||||
| from django.dispatch import receiver | ||||
| from django.http.request import HttpRequest | ||||
| from guardian.shortcuts import assign_perm | ||||
|  | ||||
| from authentik.core.models import ( | ||||
|     USER_PATH_SYSTEM_PREFIX, | ||||
|     AuthenticatedSession, | ||||
|     Token, | ||||
|     TokenIntents, | ||||
|     User, | ||||
|     UserTypes, | ||||
| ) | ||||
| from authentik.core.signals import password_changed | ||||
| from authentik.enterprise.providers.ssf.models import ( | ||||
|     EventTypes, | ||||
|     SSFProvider, | ||||
| ) | ||||
| from authentik.enterprise.providers.ssf.tasks import send_ssf_event | ||||
| from authentik.events.middleware import audit_ignore | ||||
| from authentik.stages.authenticator.models import Device | ||||
| from authentik.stages.authenticator_duo.models import DuoDevice | ||||
| from authentik.stages.authenticator_static.models import StaticDevice | ||||
| from authentik.stages.authenticator_totp.models import TOTPDevice | ||||
| from authentik.stages.authenticator_webauthn.models import ( | ||||
|     UNKNOWN_DEVICE_TYPE_AAGUID, | ||||
|     WebAuthnDevice, | ||||
| ) | ||||
|  | ||||
| USER_PATH_PROVIDERS_SSF = USER_PATH_SYSTEM_PREFIX + "/providers/ssf" | ||||
|  | ||||
|  | ||||
| @receiver(post_save, sender=SSFProvider) | ||||
| def ssf_providers_post_save(sender: type[Model], instance: SSFProvider, created: bool, **_): | ||||
|     """Create service account before provider is saved""" | ||||
|     identifier = instance.service_account_identifier | ||||
|     user, _ = User.objects.update_or_create( | ||||
|         username=identifier, | ||||
|         defaults={ | ||||
|             "name": f"SSF Provider {instance.name} Service-Account", | ||||
|             "type": UserTypes.INTERNAL_SERVICE_ACCOUNT, | ||||
|             "path": USER_PATH_PROVIDERS_SSF, | ||||
|         }, | ||||
|     ) | ||||
|     assign_perm("add_stream", user, instance) | ||||
|     token, token_created = Token.objects.update_or_create( | ||||
|         identifier=identifier, | ||||
|         defaults={ | ||||
|             "user": user, | ||||
|             "intent": TokenIntents.INTENT_API, | ||||
|             "expiring": False, | ||||
|             "managed": f"goauthentik.io/providers/ssf/{instance.pk}", | ||||
|         }, | ||||
|     ) | ||||
|     if created or token_created: | ||||
|         with audit_ignore(): | ||||
|             instance.token = token | ||||
|             instance.save() | ||||
|  | ||||
|  | ||||
| @receiver(user_logged_out) | ||||
| def ssf_user_logged_out_session_revoked(sender, request: HttpRequest, user: User, **_): | ||||
|     """Session revoked trigger (user logged out)""" | ||||
|     if not request.session or not request.session.session_key or not user: | ||||
|         return | ||||
|     send_ssf_event( | ||||
|         EventTypes.CAEP_SESSION_REVOKED, | ||||
|         { | ||||
|             "initiating_entity": "user", | ||||
|         }, | ||||
|         sub_id={ | ||||
|             "format": "complex", | ||||
|             "session": { | ||||
|                 "format": "opaque", | ||||
|                 "id": sha256(request.session.session_key.encode("ascii")).hexdigest(), | ||||
|             }, | ||||
|             "user": { | ||||
|                 "format": "email", | ||||
|                 "email": user.email, | ||||
|             }, | ||||
|         }, | ||||
|         request=request, | ||||
|     ) | ||||
|  | ||||
|  | ||||
| @receiver(pre_delete, sender=AuthenticatedSession) | ||||
| def ssf_user_session_delete_session_revoked(sender, instance: AuthenticatedSession, **_): | ||||
|     """Session revoked trigger (users' session has been deleted) | ||||
|  | ||||
|     As this signal is also triggered with a regular logout, we can't be sure | ||||
|     if the session has been deleted by an admin or by the user themselves.""" | ||||
|     send_ssf_event( | ||||
|         EventTypes.CAEP_SESSION_REVOKED, | ||||
|         { | ||||
|             "initiating_entity": "user", | ||||
|         }, | ||||
|         sub_id={ | ||||
|             "format": "complex", | ||||
|             "session": { | ||||
|                 "format": "opaque", | ||||
|                 "id": sha256(instance.session_key.encode("ascii")).hexdigest(), | ||||
|             }, | ||||
|             "user": { | ||||
|                 "format": "email", | ||||
|                 "email": instance.user.email, | ||||
|             }, | ||||
|         }, | ||||
|     ) | ||||
|  | ||||
|  | ||||
| @receiver(password_changed) | ||||
| def ssf_password_changed_cred_change(sender, user: User, password: str | None, **_): | ||||
|     """Credential change trigger (password changed)""" | ||||
|     send_ssf_event( | ||||
|         EventTypes.CAEP_CREDENTIAL_CHANGE, | ||||
|         { | ||||
|             "credential_type": "password", | ||||
|             "change_type": "revoke" if password is None else "update", | ||||
|         }, | ||||
|         sub_id={ | ||||
|             "format": "complex", | ||||
|             "user": { | ||||
|                 "format": "email", | ||||
|                 "email": user.email, | ||||
|             }, | ||||
|         }, | ||||
|     ) | ||||
|  | ||||
|  | ||||
| device_type_map = { | ||||
|     StaticDevice: "pin", | ||||
|     TOTPDevice: "pin", | ||||
|     WebAuthnDevice: "fido-u2f", | ||||
|     DuoDevice: "app", | ||||
| } | ||||
|  | ||||
|  | ||||
| @receiver(post_save) | ||||
| def ssf_device_post_save(sender: type[Model], instance: Device, created: bool, **_): | ||||
|     if not isinstance(instance, Device): | ||||
|         return | ||||
|     if not instance.confirmed: | ||||
|         return | ||||
|     device_type = device_type_map.get(instance.__class__) | ||||
|     data = { | ||||
|         "credential_type": device_type, | ||||
|         "change_type": "create" if created else "update", | ||||
|         "friendly_name": instance.name, | ||||
|     } | ||||
|     if isinstance(instance, WebAuthnDevice) and instance.aaguid != UNKNOWN_DEVICE_TYPE_AAGUID: | ||||
|         data["fido2_aaguid"] = instance.aaguid | ||||
|     send_ssf_event( | ||||
|         EventTypes.CAEP_CREDENTIAL_CHANGE, | ||||
|         data, | ||||
|         sub_id={ | ||||
|             "format": "complex", | ||||
|             "user": { | ||||
|                 "format": "email", | ||||
|                 "email": instance.user.email, | ||||
|             }, | ||||
|         }, | ||||
|     ) | ||||
|  | ||||
|  | ||||
| @receiver(post_delete) | ||||
| def ssf_device_post_delete(sender: type[Model], instance: Device, **_): | ||||
|     if not isinstance(instance, Device): | ||||
|         return | ||||
|     if not instance.confirmed: | ||||
|         return | ||||
|     device_type = device_type_map.get(instance.__class__) | ||||
|     data = { | ||||
|         "credential_type": device_type, | ||||
|         "change_type": "delete", | ||||
|         "friendly_name": instance.name, | ||||
|     } | ||||
|     if isinstance(instance, WebAuthnDevice) and instance.aaguid != UNKNOWN_DEVICE_TYPE_AAGUID: | ||||
|         data["fido2_aaguid"] = instance.aaguid | ||||
|     send_ssf_event( | ||||
|         EventTypes.CAEP_CREDENTIAL_CHANGE, | ||||
|         data, | ||||
|         sub_id={ | ||||
|             "format": "complex", | ||||
|             "user": { | ||||
|                 "format": "email", | ||||
|                 "email": instance.user.email, | ||||
|             }, | ||||
|         }, | ||||
|     ) | ||||
| @ -1,136 +0,0 @@ | ||||
| from celery import group | ||||
| from django.http import HttpRequest | ||||
| from django.utils.timezone import now | ||||
| from django.utils.translation import gettext_lazy as _ | ||||
| from requests.exceptions import RequestException | ||||
| from structlog.stdlib import get_logger | ||||
|  | ||||
| from authentik.core.models import User | ||||
| from authentik.enterprise.providers.ssf.models import ( | ||||
|     DeliveryMethods, | ||||
|     EventTypes, | ||||
|     SSFEventStatus, | ||||
|     Stream, | ||||
|     StreamEvent, | ||||
| ) | ||||
| from authentik.events.logs import LogEvent | ||||
| from authentik.events.models import TaskStatus | ||||
| from authentik.events.system_tasks import SystemTask | ||||
| from authentik.lib.utils.http import get_http_session | ||||
| from authentik.lib.utils.time import timedelta_from_string | ||||
| from authentik.policies.engine import PolicyEngine | ||||
| from authentik.root.celery import CELERY_APP | ||||
|  | ||||
| session = get_http_session() | ||||
| LOGGER = get_logger() | ||||
|  | ||||
|  | ||||
| def send_ssf_event( | ||||
|     event_type: EventTypes, | ||||
|     data: dict, | ||||
|     stream_filter: dict | None = None, | ||||
|     request: HttpRequest | None = None, | ||||
|     **extra_data, | ||||
| ): | ||||
|     """Wrapper to send an SSF event to multiple streams""" | ||||
|     payload = [] | ||||
|     if not stream_filter: | ||||
|         stream_filter = {} | ||||
|     stream_filter["events_requested__contains"] = [event_type] | ||||
|     if request and hasattr(request, "request_id"): | ||||
|         extra_data.setdefault("txn", request.request_id) | ||||
|     for stream in Stream.objects.filter(**stream_filter): | ||||
|         event_data = stream.prepare_event_payload(event_type, data, **extra_data) | ||||
|         payload.append((str(stream.uuid), event_data)) | ||||
|     return _send_ssf_event.delay(payload) | ||||
|  | ||||
|  | ||||
| def _check_app_access(stream_uuid: str, event_data: dict) -> bool: | ||||
|     """Check if event is related to user and if so, check | ||||
|     if the user has access to the application""" | ||||
|     stream = Stream.objects.filter(pk=stream_uuid).first() | ||||
|     if not stream: | ||||
|         return False | ||||
|     # `event_data` is a dict version of a StreamEvent | ||||
|     sub_id = event_data.get("payload", {}).get("sub_id", {}) | ||||
|     email = sub_id.get("user", {}).get("email", None) | ||||
|     if not email: | ||||
|         return True | ||||
|     user = User.objects.filter(email=email).first() | ||||
|     if not user: | ||||
|         return True | ||||
|     engine = PolicyEngine(stream.provider.backchannel_application, user) | ||||
|     engine.use_cache = False | ||||
|     engine.build() | ||||
|     return engine.passing | ||||
|  | ||||
|  | ||||
| @CELERY_APP.task() | ||||
| def _send_ssf_event(event_data: list[tuple[str, dict]]): | ||||
|     tasks = [] | ||||
|     for stream, data in event_data: | ||||
|         if not _check_app_access(stream, data): | ||||
|             continue | ||||
|         event = StreamEvent.objects.create(**data) | ||||
|         tasks.extend(send_single_ssf_event(stream, str(event.uuid))) | ||||
|     main_task = group(*tasks) | ||||
|     main_task() | ||||
|  | ||||
|  | ||||
def send_single_ssf_event(stream_id: str, evt_id: str):
    """Build the list of delivery task signatures for a single stream event.

    Always returns a list (possibly empty) so callers can `extend()` the
    result unconditionally; the original returned None on the early-exit
    paths, which broke `_send_ssf_event`'s `tasks.extend(...)` call.
    """
    stream = Stream.objects.filter(pk=stream_id).first()
    if not stream:
        return []
    event = StreamEvent.objects.filter(pk=evt_id).first()
    if not event:
        return []
    # Already delivered, nothing to do
    if event.status == SSFEventStatus.SENT:
        return []
    if stream.delivery_method == DeliveryMethods.RISC_PUSH:
        return [ssf_push_event.si(str(event.pk))]
    # No other delivery methods are currently implemented
    return []
|  | ||||
|  | ||||
@CELERY_APP.task(bind=True, base=SystemTask)
def ssf_push_event(self: SystemTask, event_id: str):
    """Deliver a single StreamEvent via RISC push (HTTP POST of a signed SET).

    On success the event is marked SENT; on request failure the event is kept
    as PENDING_FAILED and its expiry is extended so delivery can be retried.
    """
    self.save_on_success = False
    event = StreamEvent.objects.filter(pk=event_id).first()
    if not event:
        return
    self.set_uid(event_id)
    if event.status == SSFEventStatus.SENT:
        self.set_status(TaskStatus.SUCCESSFUL)
        return
    try:
        response = session.post(
            event.stream.endpoint_url,
            data=event.stream.encode(event.payload),
            headers={"Content-Type": "application/secevent+jwt", "Accept": "application/json"},
        )
        response.raise_for_status()
        event.status = SSFEventStatus.SENT
        event.save()
        self.set_status(TaskStatus.SUCCESSFUL)
        return
    except RequestException as exc:
        LOGGER.warning("Failed to send SSF event", exc=exc)
        self.set_status(TaskStatus.ERROR)
        attrs = {}
        # `requests.Response` is falsy for 4xx/5xx status codes, so a plain
        # truthiness check would drop the response details exactly when they
        # are most useful — compare against None explicitly.
        if exc.response is not None:
            attrs["response"] = {
                "content": exc.response.text,
                "status": exc.response.status_code,
            }
        self.set_error(
            exc,
            LogEvent(
                _("Failed to send request"),
                log_level="warning",
                logger=self.__name__,
                attributes=attrs,
            ),
        )
        # Re-up the expiry of the stream event
        event.expires = now() + timedelta_from_string(event.stream.provider.event_retention)
        event.status = SSFEventStatus.PENDING_FAILED
        event.save()
| @ -1,46 +0,0 @@ | ||||
| import json | ||||
|  | ||||
| from django.urls import reverse | ||||
| from rest_framework.test import APITestCase | ||||
|  | ||||
| from authentik.core.models import Application | ||||
| from authentik.core.tests.utils import create_test_cert | ||||
| from authentik.enterprise.providers.ssf.models import ( | ||||
|     SSFProvider, | ||||
| ) | ||||
| from authentik.lib.generators import generate_id | ||||
|  | ||||
|  | ||||
class TestConfiguration(APITestCase):
    """Test the SSF well-known configuration endpoint."""

    def setUp(self):
        self.application = Application.objects.create(name=generate_id(), slug=generate_id())
        self.provider = SSFProvider.objects.create(
            name=generate_id(),
            signing_key=create_test_cert(),
            backchannel_application=self.application,
        )

    def _fetch_configuration(self, **request_kwargs) -> dict:
        """GET the configuration endpoint, assert 200, return the JSON body."""
        url = reverse(
            "authentik_providers_ssf:configuration",
            kwargs={"application_slug": self.application.slug},
        )
        res = self.client.get(url, **request_kwargs)
        self.assertEqual(res.status_code, 200)
        return json.loads(res.content)

    def test_config_fetch(self):
        """test SSF configuration (unauthenticated)"""
        content = self._fetch_configuration()
        self.assertEqual(content["spec_version"], "1_0-ID2")

    def test_config_fetch_authenticated(self):
        """test SSF configuration (authenticated)"""
        content = self._fetch_configuration(
            HTTP_AUTHORIZATION=f"Bearer {self.provider.token.key}",
        )
        self.assertEqual(content["spec_version"], "1_0-ID2")
| @ -1,51 +0,0 @@ | ||||
| """JWKS tests""" | ||||
|  | ||||
| import base64 | ||||
| import json | ||||
|  | ||||
| from cryptography.hazmat.backends import default_backend | ||||
| from cryptography.x509 import load_der_x509_certificate | ||||
| from django.test import TestCase | ||||
| from django.urls.base import reverse | ||||
| from jwt import PyJWKSet | ||||
|  | ||||
| from authentik.core.models import Application | ||||
| from authentik.core.tests.utils import create_test_cert | ||||
| from authentik.enterprise.providers.ssf.models import SSFProvider | ||||
| from authentik.lib.generators import generate_id | ||||
|  | ||||
|  | ||||
class TestJWKS(TestCase):
    """Test JWKS view"""

    def _fetch_jwks(self) -> dict:
        """Create a provider with a backchannel app and return the JWKS body."""
        provider = SSFProvider.objects.create(
            name=generate_id(),
            signing_key=create_test_cert(),
        )
        app = Application.objects.create(name=generate_id(), slug=generate_id())
        app.backchannel_providers.add(provider)
        response = self.client.get(
            reverse("authentik_providers_ssf:jwks", kwargs={"application_slug": app.slug})
        )
        return json.loads(response.content.decode())

    def test_rs256(self):
        """Test JWKS request with RS256"""
        body = self._fetch_jwks()
        self.assertEqual(len(body["keys"]), 1)
        PyJWKSet.from_dict(body)
        # The embedded x5c certificate must be a parseable DER certificate
        key = body["keys"][0]
        load_der_x509_certificate(base64.b64decode(key["x5c"][0]), default_backend()).public_key()

    def test_es256(self):
        """Test JWKS request with ES256"""
        body = self._fetch_jwks()
        self.assertEqual(len(body["keys"]), 1)
        PyJWKSet.from_dict(body)
| @ -1,168 +0,0 @@ | ||||
| from uuid import uuid4 | ||||
|  | ||||
| from django.urls import reverse | ||||
| from rest_framework.test import APITestCase | ||||
|  | ||||
| from authentik.core.models import Application, Group | ||||
| from authentik.core.tests.utils import ( | ||||
|     create_test_cert, | ||||
|     create_test_user, | ||||
| ) | ||||
| from authentik.enterprise.providers.ssf.models import ( | ||||
|     EventTypes, | ||||
|     SSFEventStatus, | ||||
|     SSFProvider, | ||||
|     Stream, | ||||
|     StreamEvent, | ||||
| ) | ||||
| from authentik.lib.generators import generate_id | ||||
| from authentik.policies.models import PolicyBinding | ||||
| from authentik.stages.authenticator_webauthn.models import WebAuthnDevice | ||||
|  | ||||
|  | ||||
class TestSignals(APITestCase):
    """Test individual SSF Signals"""

    def setUp(self):
        self.application = Application.objects.create(name=generate_id(), slug=generate_id())
        self.provider = SSFProvider.objects.create(
            name=generate_id(),
            signing_key=create_test_cert(),
            backchannel_application=self.application,
        )
        # Register a push stream subscribed to credential-change and
        # session-revoked events, so the signal handlers create StreamEvents
        res = self.client.post(
            reverse(
                "authentik_providers_ssf:stream",
                kwargs={"application_slug": self.application.slug},
            ),
            data={
                "iss": "https://authentik.company/.well-known/ssf-configuration/foo/5",
                "aud": ["https://app.authentik.company"],
                "delivery": {
                    "method": "https://schemas.openid.net/secevent/risc/delivery-method/push",
                    "endpoint_url": "https://app.authentik.company",
                },
                "events_requested": [
                    "https://schemas.openid.net/secevent/caep/event-type/credential-change",
                    "https://schemas.openid.net/secevent/caep/event-type/session-revoked",
                ],
                "format": "iss_sub",
            },
            HTTP_AUTHORIZATION=f"Bearer {self.provider.token.key}",
        )
        self.assertEqual(res.status_code, 201, res.content)

    def test_signal_logout(self):
        """Test user logout"""
        user = create_test_user()
        self.client.force_login(user)
        self.client.logout()

        stream = Stream.objects.filter(provider=self.provider).first()
        self.assertIsNotNone(stream)
        event = StreamEvent.objects.filter(stream=stream).first()
        self.assertIsNotNone(event)
        # Push delivery to the dummy endpoint fails, so the event stays pending
        self.assertEqual(event.status, SSFEventStatus.PENDING_FAILED)
        event_payload = event.payload["events"][
            "https://schemas.openid.net/secevent/caep/event-type/session-revoked"
        ]
        self.assertEqual(event_payload["initiating_entity"], "user")
        self.assertEqual(event.payload["sub_id"]["format"], "complex")
        self.assertEqual(event.payload["sub_id"]["session"]["format"], "opaque")
        self.assertEqual(event.payload["sub_id"]["user"]["format"], "email")
        self.assertEqual(event.payload["sub_id"]["user"]["email"], user.email)

    def test_signal_password_change(self):
        """Test user password change"""
        user = create_test_user()
        self.client.force_login(user)
        user.set_password(generate_id())
        user.save()

        stream = Stream.objects.filter(provider=self.provider).first()
        self.assertIsNotNone(stream)
        event = StreamEvent.objects.filter(stream=stream).first()
        self.assertIsNotNone(event)
        self.assertEqual(event.status, SSFEventStatus.PENDING_FAILED)
        event_payload = event.payload["events"][
            "https://schemas.openid.net/secevent/caep/event-type/credential-change"
        ]
        self.assertEqual(event_payload["change_type"], "update")
        self.assertEqual(event_payload["credential_type"], "password")
        self.assertEqual(event.payload["sub_id"]["format"], "complex")
        self.assertEqual(event.payload["sub_id"]["user"]["format"], "email")
        self.assertEqual(event.payload["sub_id"]["user"]["email"], user.email)

    def test_signal_authenticator_added(self):
        """Test authenticator creation signal"""
        user = create_test_user()
        self.client.force_login(user)
        dev = WebAuthnDevice.objects.create(
            user=user,
            name=generate_id(),
            credential_id=generate_id(),
            public_key=generate_id(),
            aaguid=str(uuid4()),
        )

        stream = Stream.objects.filter(provider=self.provider).first()
        self.assertIsNotNone(stream)
        # (removed a no-op `.exclude()` with no arguments here)
        event = StreamEvent.objects.filter(stream=stream).first()
        self.assertIsNotNone(event)
        self.assertEqual(event.status, SSFEventStatus.PENDING_FAILED)
        event_payload = event.payload["events"][
            "https://schemas.openid.net/secevent/caep/event-type/credential-change"
        ]
        self.assertEqual(event_payload["change_type"], "create")
        self.assertEqual(event_payload["fido2_aaguid"], dev.aaguid)
        self.assertEqual(event_payload["friendly_name"], dev.name)
        self.assertEqual(event_payload["credential_type"], "fido-u2f")
        self.assertEqual(event.payload["sub_id"]["format"], "complex")
        self.assertEqual(event.payload["sub_id"]["user"]["format"], "email")
        self.assertEqual(event.payload["sub_id"]["user"]["email"], user.email)

    def test_signal_authenticator_deleted(self):
        """Test authenticator deletion signal"""
        user = create_test_user()
        self.client.force_login(user)
        dev = WebAuthnDevice.objects.create(
            user=user,
            name=generate_id(),
            credential_id=generate_id(),
            public_key=generate_id(),
            aaguid=str(uuid4()),
        )
        dev.delete()

        stream = Stream.objects.filter(provider=self.provider).first()
        self.assertIsNotNone(stream)
        # (removed a no-op `.exclude()` with no arguments here)
        event = StreamEvent.objects.filter(stream=stream).first()
        self.assertIsNotNone(event)
        self.assertEqual(event.status, SSFEventStatus.PENDING_FAILED)
        event_payload = event.payload["events"][
            "https://schemas.openid.net/secevent/caep/event-type/credential-change"
        ]
        self.assertEqual(event_payload["change_type"], "delete")
        self.assertEqual(event_payload["fido2_aaguid"], dev.aaguid)
        self.assertEqual(event_payload["friendly_name"], dev.name)
        self.assertEqual(event_payload["credential_type"], "fido-u2f")
        self.assertEqual(event.payload["sub_id"]["format"], "complex")
        self.assertEqual(event.payload["sub_id"]["user"]["format"], "email")
        self.assertEqual(event.payload["sub_id"]["user"]["email"], user.email)

    def test_signal_policy_ignore(self):
        """Test event not being created for user that doesn't have access to the application"""
        # Bind an empty group to the application so no user passes the policy
        PolicyBinding.objects.create(
            target=self.application, group=Group.objects.create(name=generate_id()), order=0
        )
        user = create_test_user()
        self.client.force_login(user)
        user.set_password(generate_id())
        user.save()

        stream = Stream.objects.filter(provider=self.provider).first()
        self.assertIsNotNone(stream)
        event = StreamEvent.objects.filter(
            stream=stream, type=EventTypes.CAEP_CREDENTIAL_CHANGE
        ).first()
        self.assertIsNone(event)
| @ -1,154 +0,0 @@ | ||||
| import json | ||||
| from dataclasses import asdict | ||||
|  | ||||
| from django.urls import reverse | ||||
| from django.utils import timezone | ||||
| from rest_framework.test import APITestCase | ||||
|  | ||||
| from authentik.core.models import Application | ||||
| from authentik.core.tests.utils import create_test_admin_user, create_test_cert, create_test_flow | ||||
| from authentik.enterprise.providers.ssf.models import ( | ||||
|     SSFEventStatus, | ||||
|     SSFProvider, | ||||
|     Stream, | ||||
|     StreamEvent, | ||||
| ) | ||||
| from authentik.lib.generators import generate_id | ||||
| from authentik.providers.oauth2.id_token import IDToken | ||||
| from authentik.providers.oauth2.models import AccessToken, OAuth2Provider | ||||
|  | ||||
|  | ||||
class TestStream(APITestCase):
    """Test SSF stream registration and deletion (token and OIDC auth)."""

    def setUp(self):
        self.application = Application.objects.create(name=generate_id(), slug=generate_id())
        self.provider = SSFProvider.objects.create(
            name=generate_id(),
            signing_key=create_test_cert(),
            backchannel_application=self.application,
        )

    def test_stream_add_token(self):
        """test stream add (token auth)"""
        res = self.client.post(
            reverse(
                "authentik_providers_ssf:stream",
                kwargs={"application_slug": self.application.slug},
            ),
            data={
                "iss": "https://authentik.company/.well-known/ssf-configuration/foo/5",
                "aud": ["https://app.authentik.company"],
                "delivery": {
                    "method": "https://schemas.openid.net/secevent/risc/delivery-method/push",
                    "endpoint_url": "https://app.authentik.company",
                },
                "events_requested": [
                    "https://schemas.openid.net/secevent/caep/event-type/credential-change",
                    "https://schemas.openid.net/secevent/caep/event-type/session-revoked",
                ],
                "format": "iss_sub",
            },
            HTTP_AUTHORIZATION=f"Bearer {self.provider.token.key}",
        )
        self.assertEqual(res.status_code, 201)
        stream = Stream.objects.filter(provider=self.provider).first()
        self.assertIsNotNone(stream)
        # Creating a stream immediately emits a verification event; push
        # delivery to the dummy endpoint fails, so it stays pending
        event = StreamEvent.objects.filter(stream=stream).first()
        self.assertIsNotNone(event)
        self.assertEqual(event.status, SSFEventStatus.PENDING_FAILED)
        self.assertEqual(
            event.payload["events"],
            {"https://schemas.openid.net/secevent/ssf/event-type/verification": {"state": None}},
        )

    def test_stream_add_poll(self):
        """test stream add - poll method"""
        # Poll delivery is rejected by StreamDeliverySerializer.validate_method
        res = self.client.post(
            reverse(
                "authentik_providers_ssf:stream",
                kwargs={"application_slug": self.application.slug},
            ),
            data={
                "iss": "https://authentik.company/.well-known/ssf-configuration/foo/5",
                "aud": ["https://app.authentik.company"],
                "delivery": {
                    "method": "https://schemas.openid.net/secevent/risc/delivery-method/poll",
                },
                "events_requested": [
                    "https://schemas.openid.net/secevent/caep/event-type/credential-change",
                    "https://schemas.openid.net/secevent/caep/event-type/session-revoked",
                ],
                "format": "iss_sub",
            },
            HTTP_AUTHORIZATION=f"Bearer {self.provider.token.key}",
        )
        self.assertEqual(res.status_code, 400)
        self.assertJSONEqual(
            res.content,
            {"delivery": {"method": ["Polling for SSF events is not currently supported."]}},
        )

    def test_stream_add_oidc(self):
        """test stream add (oidc auth)"""
        # Attach an OAuth2 provider to the application so a JWT issued by it
        # is accepted by SSFTokenAuth's JWT path
        provider = OAuth2Provider.objects.create(
            name=generate_id(),
            authorization_flow=create_test_flow(),
        )
        self.application.provider = provider
        self.application.save()
        user = create_test_admin_user()
        token = AccessToken.objects.create(
            provider=provider,
            user=user,
            token=generate_id(),
            auth_time=timezone.now(),
            _scope="openid user profile",
            _id_token=json.dumps(
                asdict(
                    IDToken("foo", "bar"),
                )
            ),
        )

        res = self.client.post(
            reverse(
                "authentik_providers_ssf:stream",
                kwargs={"application_slug": self.application.slug},
            ),
            data={
                "iss": "https://authentik.company/.well-known/ssf-configuration/foo/5",
                "aud": ["https://app.authentik.company"],
                "delivery": {
                    "method": "https://schemas.openid.net/secevent/risc/delivery-method/push",
                    "endpoint_url": "https://app.authentik.company",
                },
                "events_requested": [
                    "https://schemas.openid.net/secevent/caep/event-type/credential-change",
                    "https://schemas.openid.net/secevent/caep/event-type/session-revoked",
                ],
                "format": "iss_sub",
            },
            HTTP_AUTHORIZATION=f"Bearer {token.token}",
        )
        self.assertEqual(res.status_code, 201)
        stream = Stream.objects.filter(provider=self.provider).first()
        self.assertIsNotNone(stream)
        event = StreamEvent.objects.filter(stream=stream).first()
        self.assertIsNotNone(event)
        self.assertEqual(event.status, SSFEventStatus.PENDING_FAILED)
        self.assertEqual(
            event.payload["events"],
            {"https://schemas.openid.net/secevent/ssf/event-type/verification": {"state": None}},
        )

    def test_stream_delete(self):
        """delete stream"""
        stream = Stream.objects.create(provider=self.provider)
        res = self.client.delete(
            reverse(
                "authentik_providers_ssf:stream",
                kwargs={"application_slug": self.application.slug},
            ),
            HTTP_AUTHORIZATION=f"Bearer {self.provider.token.key}",
        )
        self.assertEqual(res.status_code, 204)
        self.assertFalse(Stream.objects.filter(pk=stream.pk).exists())
| @ -1,32 +0,0 @@ | ||||
| """SSF provider URLs""" | ||||
|  | ||||
| from django.urls import path | ||||
|  | ||||
| from authentik.enterprise.providers.ssf.api.providers import SSFProviderViewSet | ||||
| from authentik.enterprise.providers.ssf.api.streams import SSFStreamViewSet | ||||
| from authentik.enterprise.providers.ssf.views.configuration import ConfigurationView | ||||
| from authentik.enterprise.providers.ssf.views.jwks import JWKSview | ||||
| from authentik.enterprise.providers.ssf.views.stream import StreamView | ||||
|  | ||||
urlpatterns = [
    # Per-application JWKS endpoint, used by event receivers to verify
    # signatures on the SETs this provider emits
    path(
        "application/ssf/<slug:application_slug>/ssf-jwks/",
        JWKSview.as_view(),
        name="jwks",
    ),
    # Well-known SSF transmitter metadata (unauthenticated)
    path(
        ".well-known/ssf-configuration/<slug:application_slug>",
        ConfigurationView.as_view(),
        name="configuration",
    ),
    # Stream management endpoint (create/delete event streams)
    path(
        "application/ssf/<slug:application_slug>/stream/",
        StreamView.as_view(),
        name="stream",
    ),
]

# Registered by authentik's API router: (url prefix, viewset)
api_urlpatterns = [
    ("providers/ssf", SSFProviderViewSet),
    ("ssf/streams", SSFStreamViewSet),
]
| @ -1,66 +0,0 @@ | ||||
| """SSF Token auth""" | ||||
|  | ||||
| from typing import TYPE_CHECKING, Any | ||||
|  | ||||
| from django.db.models import Q | ||||
| from rest_framework.authentication import BaseAuthentication, get_authorization_header | ||||
| from rest_framework.request import Request | ||||
|  | ||||
| from authentik.core.models import Token, TokenIntents, User | ||||
| from authentik.enterprise.providers.ssf.models import SSFProvider | ||||
| from authentik.providers.oauth2.models import AccessToken | ||||
|  | ||||
| if TYPE_CHECKING: | ||||
|     from authentik.enterprise.providers.ssf.views.base import SSFView | ||||
|  | ||||
|  | ||||
class SSFTokenAuth(BaseAuthentication):
    """SSF Token auth

    Accepts either an authentik API token assigned to an SSF provider, or a
    JWT issued by an OAuth2 provider linked to the SSF provider. On success,
    the matched application/provider are attached to the view.
    """

    view: "SSFView"

    def __init__(self, view: "SSFView") -> None:
        super().__init__()
        self.view = view

    def check_token(self, key: str) -> Token | None:
        """Check that a token exists, is not expired, and is assigned to the correct provider"""
        token = Token.filter_not_expired(key=key, intent=TokenIntents.INTENT_API).first()
        if not token:
            return None
        provider: SSFProvider = token.ssfprovider_set.first()
        if not provider:
            return None
        self.view.application = provider.backchannel_application
        self.view.provider = provider
        return token

    def check_jwt(self, jwt: str) -> AccessToken | None:
        """Check JWT-based authentication, this supports tokens issued either by providers
        configured directly in the provider, and by providers assigned to the application
        that the SSF provider is a backchannel provider of."""
        token = AccessToken.filter_not_expired(token=jwt, revoked=False).first()
        if not token:
            return None
        ssf_provider = SSFProvider.objects.filter(
            Q(oidc_auth_providers__in=[token.provider])
            | Q(backchannel_application__provider__in=[token.provider]),
        ).first()
        if not ssf_provider:
            return None
        self.view.application = ssf_provider.backchannel_application
        self.view.provider = ssf_provider
        return token

    def authenticate(self, request: Request) -> tuple[User, Any] | None:
        """Try API-token auth first, then JWT auth; None if neither matches."""
        auth = get_authorization_header(request).decode()
        auth_type, _, key = auth.partition(" ")
        if auth_type != "Bearer":
            return None
        token = self.check_token(key)
        if token:
            return (token.user, token)
        jwt_token = self.check_jwt(key)
        if jwt_token:
            # BUGFIX: previously returned `token` (None at this point) as the
            # auth credential instead of the matched JWT access token
            return (jwt_token.user, jwt_token)
        return None
| @ -1,23 +0,0 @@ | ||||
| from django.http import HttpRequest | ||||
| from rest_framework.permissions import IsAuthenticated | ||||
| from rest_framework.views import APIView | ||||
| from structlog.stdlib import BoundLogger, get_logger | ||||
|  | ||||
| from authentik.core.models import Application | ||||
| from authentik.enterprise.providers.ssf.models import SSFProvider | ||||
| from authentik.enterprise.providers.ssf.views.auth import SSFTokenAuth | ||||
|  | ||||
|  | ||||
class SSFView(APIView):
    """Base view for SSF endpoints: authenticated via SSFTokenAuth, which
    also attaches the matched application/provider to the view."""

    # Set by SSFTokenAuth on successful authentication
    application: Application
    provider: SSFProvider
    # Bound structlog logger, created per-request in setup()
    logger: BoundLogger

    permission_classes = [IsAuthenticated]

    def setup(self, request: HttpRequest, *args, **kwargs) -> None:
        """Bind a fresh logger before standard view setup runs."""
        self.logger = get_logger().bind()
        super().setup(request, *args, **kwargs)

    def get_authenticators(self):
        """Use only SSF token/JWT authentication for these endpoints."""
        return [SSFTokenAuth(self)]
| @ -1,55 +0,0 @@ | ||||
| from django.http import Http404, HttpRequest, HttpResponse, JsonResponse | ||||
| from django.shortcuts import get_object_or_404 | ||||
| from django.urls import reverse | ||||
| from rest_framework.permissions import AllowAny | ||||
|  | ||||
| from authentik.core.models import Application | ||||
| from authentik.enterprise.providers.ssf.models import DeliveryMethods, SSFProvider | ||||
| from authentik.enterprise.providers.ssf.views.base import SSFView | ||||
|  | ||||
|  | ||||
class ConfigurationView(SSFView):
    """SSF configuration endpoint"""

    permission_classes = [AllowAny]

    def get_authenticators(self):
        # Transmitter metadata is public; skip SSFView's token authentication
        return []

    def _absolute_url(self, view_name: str, application: Application) -> str:
        """Build an absolute URL for one of this provider's named routes."""
        return self.request.build_absolute_uri(
            reverse(
                f"authentik_providers_ssf:{view_name}",
                kwargs={
                    "application_slug": application.slug,
                },
            )
        )

    def get(self, request: HttpRequest, application_slug: str, *args, **kwargs) -> HttpResponse:
        """Return SSF transmitter metadata for the given application.

        404s when the application has no SSF backchannel provider.
        """
        application = get_object_or_404(Application, slug=application_slug)
        provider = application.backchannel_provider_for(SSFProvider)
        if not provider:
            raise Http404
        metadata = {
            "spec_version": "1_0-ID2",
            "issuer": self._absolute_url("configuration", application),
            "jwks_uri": self._absolute_url("jwks", application),
            "configuration_endpoint": self._absolute_url("stream", application),
            "delivery_methods_supported": [
                DeliveryMethods.RISC_PUSH,
            ],
            "authorization_schemes": [{"spec_urn": "urn:ietf:rfc:6749"}],
        }
        return JsonResponse(metadata)
| @ -1,31 +0,0 @@ | ||||
| from django.http import Http404, HttpRequest, HttpResponse, JsonResponse | ||||
| from django.shortcuts import get_object_or_404 | ||||
| from django.views import View | ||||
|  | ||||
| from authentik.core.models import Application | ||||
| from authentik.crypto.models import CertificateKeyPair | ||||
| from authentik.enterprise.providers.ssf.models import SSFProvider | ||||
| from authentik.providers.oauth2.views.jwks import JWKSView as OAuthJWKSView | ||||
|  | ||||
|  | ||||
class JWKSview(View):
    """SSF JWKS endpoint, similar to the OAuth2 provider's endpoint"""

    def get(self, request: HttpRequest, application_slug: str) -> HttpResponse:
        """Show JWK Key data for Provider"""
        application = get_object_or_404(Application, slug=application_slug)
        provider = application.backchannel_provider_for(SSFProvider)
        if not provider:
            raise Http404
        signing_key: CertificateKeyPair = provider.signing_key

        # Reuse the OAuth2 provider's JWK serialization for the signing key;
        # an empty body is returned when no JWK can be derived
        jwk = OAuthJWKSView.get_jwk_for_key(signing_key, "sig")
        body = {"keys": [jwk]} if jwk else {}

        response = JsonResponse(body)
        # Receivers fetch the JWKS cross-origin
        response["Access-Control-Allow-Origin"] = "*"
        return response
| @ -1,130 +0,0 @@ | ||||
| from django.http import HttpRequest | ||||
| from django.urls import reverse | ||||
| from rest_framework.exceptions import PermissionDenied, ValidationError | ||||
| from rest_framework.fields import CharField, ChoiceField, ListField, SerializerMethodField | ||||
| from rest_framework.request import Request | ||||
| from rest_framework.response import Response | ||||
| from rest_framework.serializers import ModelSerializer | ||||
| from structlog.stdlib import get_logger | ||||
|  | ||||
| from authentik.core.api.utils import PassiveSerializer | ||||
| from authentik.enterprise.providers.ssf.models import ( | ||||
|     DeliveryMethods, | ||||
|     EventTypes, | ||||
|     SSFProvider, | ||||
|     Stream, | ||||
| ) | ||||
| from authentik.enterprise.providers.ssf.tasks import send_ssf_event | ||||
| from authentik.enterprise.providers.ssf.views.base import SSFView | ||||
|  | ||||
| LOGGER = get_logger() | ||||
|  | ||||
|  | ||||
| class StreamDeliverySerializer(PassiveSerializer): | ||||
|     method = ChoiceField(choices=[(x.value, x.value) for x in DeliveryMethods]) | ||||
|     endpoint_url = CharField(required=False) | ||||
|  | ||||
|     def validate_method(self, method: DeliveryMethods): | ||||
|         """Currently only push is supported""" | ||||
|         if method == DeliveryMethods.RISC_POLL: | ||||
|             raise ValidationError("Polling for SSF events is not currently supported.") | ||||
|         return method | ||||
|  | ||||
|     def validate(self, attrs: dict) -> dict: | ||||
|         if attrs["method"] == DeliveryMethods.RISC_PUSH: | ||||
|             if not attrs.get("endpoint_url"): | ||||
|                 raise ValidationError("Endpoint URL is required when using push.") | ||||
|         return attrs | ||||
|  | ||||
|  | ||||
| class StreamSerializer(ModelSerializer): | ||||
|     delivery = StreamDeliverySerializer() | ||||
|     events_requested = ListField( | ||||
|         child=ChoiceField(choices=[(x.value, x.value) for x in EventTypes]) | ||||
|     ) | ||||
|     format = CharField() | ||||
|     aud = ListField(child=CharField()) | ||||
|  | ||||
|     def create(self, validated_data): | ||||
|         provider: SSFProvider = validated_data["provider"] | ||||
|         request: HttpRequest = self.context["request"] | ||||
|         iss = request.build_absolute_uri( | ||||
|             reverse( | ||||
|                 "authentik_providers_ssf:configuration", | ||||
|                 kwargs={ | ||||
|                     "application_slug": provider.backchannel_application.slug, | ||||
|                 }, | ||||
|             ) | ||||
|         ) | ||||
|         # Ensure that streams always get SET verification events sent to them | ||||
|         validated_data["events_requested"].append(EventTypes.SET_VERIFICATION) | ||||
|         return super().create( | ||||
|             { | ||||
|                 "delivery_method": validated_data["delivery"]["method"], | ||||
|                 "endpoint_url": validated_data["delivery"].get("endpoint_url"), | ||||
|                 "format": validated_data["format"], | ||||
|                 "provider": validated_data["provider"], | ||||
|                 "events_requested": validated_data["events_requested"], | ||||
|                 "aud": validated_data["aud"], | ||||
|                 "iss": iss, | ||||
|             } | ||||
|         ) | ||||
|  | ||||
|     class Meta: | ||||
|         model = Stream | ||||
|         fields = [ | ||||
|             "delivery", | ||||
|             "events_requested", | ||||
|             "format", | ||||
|             "aud", | ||||
|         ] | ||||
|  | ||||
|  | ||||
| class StreamResponseSerializer(PassiveSerializer): | ||||
|     stream_id = CharField(source="pk") | ||||
|     iss = CharField() | ||||
|     aud = ListField(child=CharField()) | ||||
|     delivery = SerializerMethodField() | ||||
|     format = CharField() | ||||
|  | ||||
|     events_requested = ListField(child=CharField()) | ||||
|     events_supported = SerializerMethodField() | ||||
|     events_delivered = ListField(child=CharField(), source="events_requested") | ||||
|  | ||||
|     def get_delivery(self, instance: Stream) -> StreamDeliverySerializer: | ||||
|         return { | ||||
|             "method": instance.delivery_method, | ||||
|             "endpoint_url": instance.endpoint_url, | ||||
|         } | ||||
|  | ||||
|     def get_events_supported(self, instance: Stream) -> list[str]: | ||||
|         return [x.value for x in EventTypes] | ||||
|  | ||||
|  | ||||
| class StreamView(SSFView): | ||||
|     def post(self, request: Request, *args, **kwargs) -> Response: | ||||
|         stream = StreamSerializer(data=request.data, context={"request": request}) | ||||
|         stream.is_valid(raise_exception=True) | ||||
|         if not request.user.has_perm("authentik_providers_ssf.add_stream", self.provider): | ||||
|             raise PermissionDenied( | ||||
|                 "User does not have permission to create stream for this provider." | ||||
|             ) | ||||
|         instance: Stream = stream.save(provider=self.provider) | ||||
|         send_ssf_event( | ||||
|             EventTypes.SET_VERIFICATION, | ||||
|             { | ||||
|                 "state": None, | ||||
|             }, | ||||
|             stream_filter={"pk": instance.uuid}, | ||||
|             sub_id={"format": "opaque", "id": str(instance.uuid)}, | ||||
|         ) | ||||
|         response = StreamResponseSerializer(instance=instance, context={"request": request}).data | ||||
|         return Response(response, status=201) | ||||
|  | ||||
|     def delete(self, request: Request, *args, **kwargs) -> Response: | ||||
|         streams = Stream.objects.filter(provider=self.provider) | ||||
|         # Technically this parameter is required by the spec... | ||||
|         if "stream_id" in request.query_params: | ||||
|             streams = streams.filter(stream_id=request.query_params["stream_id"]) | ||||
|         streams.delete() | ||||
|         return Response(status=204) | ||||
| @ -16,7 +16,7 @@ TENANT_APPS = [ | ||||
|     "authentik.enterprise.audit", | ||||
|     "authentik.enterprise.providers.google_workspace", | ||||
|     "authentik.enterprise.providers.microsoft_entra", | ||||
|     "authentik.enterprise.providers.ssf", | ||||
|     "authentik.enterprise.providers.rac", | ||||
|     "authentik.enterprise.stages.authenticator_endpoint_gdtc", | ||||
|     "authentik.enterprise.stages.source", | ||||
| ] | ||||
|  | ||||
| @ -9,16 +9,13 @@ from django.utils.timezone import now | ||||
| from guardian.shortcuts import get_anonymous_user | ||||
|  | ||||
| from authentik.core.models import Source, User | ||||
| from authentik.core.sources.flow_manager import ( | ||||
|     SESSION_KEY_OVERRIDE_FLOW_TOKEN, | ||||
|     SESSION_KEY_SOURCE_FLOW_STAGES, | ||||
| ) | ||||
| from authentik.core.sources.flow_manager import SESSION_KEY_OVERRIDE_FLOW_TOKEN | ||||
| from authentik.core.types import UILoginButton | ||||
| from authentik.enterprise.stages.source.models import SourceStage | ||||
| from authentik.flows.challenge import Challenge, ChallengeResponse | ||||
| from authentik.flows.models import FlowToken, in_memory_stage | ||||
| from authentik.flows.models import FlowToken | ||||
| from authentik.flows.planner import PLAN_CONTEXT_IS_RESTORED | ||||
| from authentik.flows.stage import ChallengeStageView, StageView | ||||
| from authentik.flows.stage import ChallengeStageView | ||||
| from authentik.lib.utils.time import timedelta_from_string | ||||
|  | ||||
| PLAN_CONTEXT_RESUME_TOKEN = "resume_token"  # nosec | ||||
| @ -52,7 +49,6 @@ class SourceStageView(ChallengeStageView): | ||||
|     def get_challenge(self, *args, **kwargs) -> Challenge: | ||||
|         resume_token = self.create_flow_token() | ||||
|         self.request.session[SESSION_KEY_OVERRIDE_FLOW_TOKEN] = resume_token | ||||
|         self.request.session[SESSION_KEY_SOURCE_FLOW_STAGES] = [in_memory_stage(SourceStageFinal)] | ||||
|         return self.login_button.challenge | ||||
|  | ||||
|     def create_flow_token(self) -> FlowToken: | ||||
| @ -81,19 +77,3 @@ class SourceStageView(ChallengeStageView): | ||||
|  | ||||
|     def challenge_valid(self, response: ChallengeResponse) -> HttpResponse: | ||||
|         return self.executor.stage_ok() | ||||
|  | ||||
|  | ||||
| class SourceStageFinal(StageView): | ||||
|     """Dynamic stage injected in the source flow manager. This is injected in the | ||||
|     flow the source flow manager picks (authentication or enrollment), and will run at the end. | ||||
|     This stage uses the override flow token to resume execution of the initial flow the | ||||
|     source stage is bound to.""" | ||||
|  | ||||
|     def dispatch(self): | ||||
|         token: FlowToken = self.request.session.get(SESSION_KEY_OVERRIDE_FLOW_TOKEN) | ||||
|         self._logger.info("Replacing source flow with overridden flow", flow=token.flow.slug) | ||||
|         plan = token.plan | ||||
|         plan.context[PLAN_CONTEXT_IS_RESTORED] = token | ||||
|         response = plan.to_redirect(self.request, token.flow) | ||||
|         token.delete() | ||||
|         return response | ||||
|  | ||||
| @ -53,13 +53,12 @@ class SystemTask(TenantTask): | ||||
|             if not isinstance(msg, LogEvent): | ||||
|                 self._messages[idx] = LogEvent(msg, logger=self.__name__, log_level="info") | ||||
|  | ||||
|     def set_error(self, exception: Exception, *messages: LogEvent): | ||||
|     def set_error(self, exception: Exception): | ||||
|         """Set result to error and save exception""" | ||||
|         self._status = TaskStatus.ERROR | ||||
|         self._messages = list(messages) | ||||
|         self._messages.extend( | ||||
|             [LogEvent(exception_to_string(exception), logger=self.__name__, log_level="error")] | ||||
|         ) | ||||
|         self._messages = [ | ||||
|             LogEvent(exception_to_string(exception), logger=self.__name__, log_level="error") | ||||
|         ] | ||||
|  | ||||
|     def before_start(self, task_id, args, kwargs): | ||||
|         self._start_precise = perf_counter() | ||||
|  | ||||
| @ -3,7 +3,6 @@ | ||||
| from dataclasses import dataclass | ||||
| from typing import TYPE_CHECKING | ||||
|  | ||||
| from django.contrib.messages import INFO, add_message | ||||
| from django.http.request import HttpRequest | ||||
| from structlog.stdlib import get_logger | ||||
|  | ||||
| @ -62,8 +61,6 @@ class ReevaluateMarker(StageMarker): | ||||
|         engine.request.context.update(plan.context) | ||||
|         engine.build() | ||||
|         result = engine.result | ||||
|         for message in result.messages: | ||||
|             add_message(http_request, INFO, message) | ||||
|         if result.passing: | ||||
|             return binding | ||||
|         LOGGER.warning( | ||||
|  | ||||
| @ -109,8 +109,6 @@ class FlowPlan: | ||||
|  | ||||
|     def pop(self): | ||||
|         """Pop next pending stage from bottom of list""" | ||||
|         if not self.markers and not self.bindings: | ||||
|             return | ||||
|         self.markers.pop(0) | ||||
|         self.bindings.pop(0) | ||||
|  | ||||
| @ -158,13 +156,8 @@ class FlowPlan: | ||||
|             final_stage: type[StageView] = self.bindings[-1].stage.view | ||||
|             temp_exec = FlowExecutorView(flow=flow, request=request, plan=self) | ||||
|             temp_exec.current_stage = self.bindings[-1].stage | ||||
|             temp_exec.current_stage_view = final_stage | ||||
|             temp_exec.setup(request, flow.slug) | ||||
|             stage = final_stage(request=request, executor=temp_exec) | ||||
|             response = stage.dispatch(request) | ||||
|             # Ensure we clean the flow state we have in the session before we redirect away | ||||
|             temp_exec.stage_ok() | ||||
|             return response | ||||
|             return stage.dispatch(request) | ||||
|  | ||||
|         get_qs = request.GET.copy() | ||||
|         if request.user.is_authenticated and ( | ||||
|  | ||||
| @ -103,7 +103,7 @@ class FlowExecutorView(APIView): | ||||
|  | ||||
|     permission_classes = [AllowAny] | ||||
|  | ||||
|     flow: Flow = None | ||||
|     flow: Flow | ||||
|  | ||||
|     plan: FlowPlan | None = None | ||||
|     current_binding: FlowStageBinding | None = None | ||||
| @ -114,8 +114,7 @@ class FlowExecutorView(APIView): | ||||
|  | ||||
|     def setup(self, request: HttpRequest, flow_slug: str): | ||||
|         super().setup(request, flow_slug=flow_slug) | ||||
|         if not self.flow: | ||||
|             self.flow = get_object_or_404(Flow.objects.select_related(), slug=flow_slug) | ||||
|         self.flow = get_object_or_404(Flow.objects.select_related(), slug=flow_slug) | ||||
|         self._logger = get_logger().bind(flow_slug=flow_slug) | ||||
|         set_tag("authentik.flow", self.flow.slug) | ||||
|  | ||||
|  | ||||
| @ -283,15 +283,12 @@ class ConfigLoader: | ||||
|     def get_optional_int(self, path: str, default=None) -> int | None: | ||||
|         """Wrapper for get that converts value into int or None if set""" | ||||
|         value = self.get(path, default) | ||||
|         if value is UNSET: | ||||
|             return default | ||||
|  | ||||
|         try: | ||||
|             return int(value) | ||||
|         except (ValueError, TypeError) as exc: | ||||
|             if value is None or (isinstance(value, str) and value.lower() == "null"): | ||||
|                 return default | ||||
|             if value is UNSET: | ||||
|                 return default | ||||
|                 return None | ||||
|             self.log("warning", "Failed to parse config as int", path=path, exc=str(exc)) | ||||
|             return default | ||||
|  | ||||
| @ -424,4 +421,4 @@ if __name__ == "__main__": | ||||
|     if len(argv) < 2:  # noqa: PLR2004 | ||||
|         print(dumps(CONFIG.raw, indent=4, cls=AttrEncoder)) | ||||
|     else: | ||||
|         print(CONFIG.get(argv[-1])) | ||||
|         print(CONFIG.get(argv[1])) | ||||
|  | ||||
| @ -1,26 +0,0 @@ | ||||
| from structlog.stdlib import get_logger | ||||
|  | ||||
| from authentik.lib.config import CONFIG | ||||
|  | ||||
| LOGGER = get_logger() | ||||
|  | ||||
|  | ||||
| def start_debug_server(**kwargs) -> bool: | ||||
|     """Attempt to start a debugpy server in the current process. | ||||
|     Returns true if the server was started successfully, otherwise false""" | ||||
|     if not CONFIG.get_bool("debug") and not CONFIG.get_bool("debugger"): | ||||
|         return | ||||
|     try: | ||||
|         import debugpy | ||||
|     except ImportError: | ||||
|         LOGGER.warning( | ||||
|             "Failed to import debugpy. debugpy is not included " | ||||
|             "in the default release dependencies and must be installed manually" | ||||
|         ) | ||||
|         return False | ||||
|  | ||||
|     listen: str = CONFIG.get("listen.listen_debug_py", "127.0.0.1:9901") | ||||
|     host, _, port = listen.rpartition(":") | ||||
|     debugpy.listen((host, int(port)), **kwargs)  # nosec | ||||
|     LOGGER.debug("Starting debug server", host=host, port=port) | ||||
|     return True | ||||
| @ -8,7 +8,6 @@ postgresql: | ||||
|   password: "env://POSTGRES_PASSWORD" | ||||
|   test: | ||||
|     name: test_authentik | ||||
|   default_schema: public | ||||
|   read_replicas: {} | ||||
|   # For example | ||||
|   # 0: | ||||
| @ -22,7 +21,6 @@ listen: | ||||
|   listen_radius: 0.0.0.0:1812 | ||||
|   listen_metrics: 0.0.0.0:9300 | ||||
|   listen_debug: 0.0.0.0:9900 | ||||
|   listen_debug_py: 0.0.0.0:9901 | ||||
|   trusted_proxy_cidrs: | ||||
|     - 127.0.0.0/8 | ||||
|     - 10.0.0.0/8 | ||||
| @ -59,7 +57,7 @@ cache: | ||||
| #   transport_options: "" | ||||
|  | ||||
| debug: false | ||||
| debugger: false | ||||
| remote_debug: false | ||||
|  | ||||
| log_level: info | ||||
|  | ||||
|  | ||||
| @ -22,9 +22,9 @@ class OutgoingSyncProvider(Model): | ||||
|     class Meta: | ||||
|         abstract = True | ||||
|  | ||||
|     def client_for_model[T: User | Group]( | ||||
|         self, model: type[T] | ||||
|     ) -> BaseOutgoingSyncClient[T, Any, Any, Self]: | ||||
|     def client_for_model[ | ||||
|         T: User | Group | ||||
|     ](self, model: type[T]) -> BaseOutgoingSyncClient[T, Any, Any, Self]: | ||||
|         raise NotImplementedError | ||||
|  | ||||
|     def get_object_qs[T: User | Group](self, type: type[T]) -> QuerySet[T]: | ||||
|  | ||||
| @ -1,54 +0,0 @@ | ||||
| """Email utility functions""" | ||||
|  | ||||
|  | ||||
| def mask_email(email: str | None) -> str | None: | ||||
|     """Mask email address for privacy | ||||
|  | ||||
|     Args: | ||||
|         email: Email address to mask | ||||
|     Returns: | ||||
|         Masked email address or None if input is None | ||||
|     Example: | ||||
|         mask_email("myname@company.org") | ||||
|         'm*****@c******.org' | ||||
|     """ | ||||
|     if not email: | ||||
|         return None | ||||
|  | ||||
|     # Basic email format validation | ||||
|     if email.count("@") != 1: | ||||
|         raise ValueError("Invalid email format: Must contain exactly one '@' symbol") | ||||
|  | ||||
|     local, domain = email.split("@") | ||||
|     if not local or not domain: | ||||
|         raise ValueError("Invalid email format: Local and domain parts cannot be empty") | ||||
|  | ||||
|     domain_parts = domain.split(".") | ||||
|     if len(domain_parts) < 2:  # noqa: PLR2004 | ||||
|         raise ValueError("Invalid email format: Domain must contain at least one dot") | ||||
|  | ||||
|     limit = 2 | ||||
|  | ||||
|     # Mask local part (keep first char) | ||||
|     if len(local) <= limit: | ||||
|         masked_local = "*" * len(local) | ||||
|     else: | ||||
|         masked_local = local[0] + "*" * (len(local) - 1) | ||||
|  | ||||
|     # Mask each domain part except the last one (TLD) | ||||
|     masked_domain_parts = [] | ||||
|     for _i, part in enumerate(domain_parts[:-1]):  # Process all parts except TLD | ||||
|         if not part:  # Check for empty parts (consecutive dots) | ||||
|             raise ValueError("Invalid email format: Domain parts cannot be empty") | ||||
|         if len(part) <= limit: | ||||
|             masked_part = "*" * len(part) | ||||
|         else: | ||||
|             masked_part = part[0] + "*" * (len(part) - 1) | ||||
|         masked_domain_parts.append(masked_part) | ||||
|  | ||||
|     # Add TLD unchanged | ||||
|     if not domain_parts[-1]:  # Check if TLD is empty | ||||
|         raise ValueError("Invalid email format: TLD cannot be empty") | ||||
|     masked_domain_parts.append(domain_parts[-1]) | ||||
|  | ||||
|     return f"{masked_local}@{'.'.join(masked_domain_parts)}" | ||||
| @ -42,8 +42,6 @@ class DebugSession(Session): | ||||
|  | ||||
| def get_http_session() -> Session: | ||||
|     """Get a requests session with common headers""" | ||||
|     session = Session() | ||||
|     if CONFIG.get_bool("debug") or CONFIG.get("log_level") == "trace": | ||||
|         session = DebugSession() | ||||
|     session = DebugSession() if CONFIG.get_bool("debug") else Session() | ||||
|     session.headers["User-Agent"] = authentik_user_agent() | ||||
|     return session | ||||
|  | ||||
| @ -19,6 +19,7 @@ from authentik.core.api.used_by import UsedByMixin | ||||
| from authentik.core.api.utils import JSONDictField, ModelSerializer, PassiveSerializer | ||||
| from authentik.core.models import Provider | ||||
| from authentik.enterprise.license import LicenseKey | ||||
| from authentik.enterprise.providers.rac.models import RACProvider | ||||
| from authentik.lib.utils.time import timedelta_from_string, timedelta_string_validator | ||||
| from authentik.outposts.api.service_connections import ServiceConnectionSerializer | ||||
| from authentik.outposts.apps import MANAGED_OUTPOST, MANAGED_OUTPOST_NAME | ||||
| @ -30,7 +31,6 @@ from authentik.outposts.models import ( | ||||
| ) | ||||
| from authentik.providers.ldap.models import LDAPProvider | ||||
| from authentik.providers.proxy.models import ProxyProvider | ||||
| from authentik.providers.rac.models import RACProvider | ||||
| from authentik.providers.radius.models import RadiusProvider | ||||
|  | ||||
|  | ||||
|  | ||||
| @ -18,6 +18,8 @@ from kubernetes.config.kube_config import KUBE_CONFIG_DEFAULT_LOCATION | ||||
| from structlog.stdlib import get_logger | ||||
| from yaml import safe_load | ||||
|  | ||||
| from authentik.enterprise.providers.rac.controllers.docker import RACDockerController | ||||
| from authentik.enterprise.providers.rac.controllers.kubernetes import RACKubernetesController | ||||
| from authentik.events.models import TaskStatus | ||||
| from authentik.events.system_tasks import SystemTask, prefill_task | ||||
| from authentik.lib.config import CONFIG | ||||
| @ -39,8 +41,6 @@ from authentik.providers.ldap.controllers.docker import LDAPDockerController | ||||
| from authentik.providers.ldap.controllers.kubernetes import LDAPKubernetesController | ||||
| from authentik.providers.proxy.controllers.docker import ProxyDockerController | ||||
| from authentik.providers.proxy.controllers.kubernetes import ProxyKubernetesController | ||||
| from authentik.providers.rac.controllers.docker import RACDockerController | ||||
| from authentik.providers.rac.controllers.kubernetes import RACKubernetesController | ||||
| from authentik.providers.radius.controllers.docker import RadiusDockerController | ||||
| from authentik.providers.radius.controllers.kubernetes import RadiusKubernetesController | ||||
| from authentik.root.celery import CELERY_APP | ||||
|  | ||||
| @ -42,12 +42,6 @@ class GeoIPPolicySerializer(CountryFieldMixin, PolicySerializer): | ||||
|             "asns", | ||||
|             "countries", | ||||
|             "countries_obj", | ||||
|             "check_history_distance", | ||||
|             "history_max_distance_km", | ||||
|             "distance_tolerance_km", | ||||
|             "history_login_count", | ||||
|             "check_impossible_travel", | ||||
|             "impossible_tolerance_km", | ||||
|         ] | ||||
|  | ||||
|  | ||||
|  | ||||
| @ -1,43 +0,0 @@ | ||||
| # Generated by Django 5.0.10 on 2025-01-02 20:40 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("authentik_policies_geoip", "0001_initial"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name="geoippolicy", | ||||
|             name="check_history_distance", | ||||
|             field=models.BooleanField(default=False), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name="geoippolicy", | ||||
|             name="check_impossible_travel", | ||||
|             field=models.BooleanField(default=False), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name="geoippolicy", | ||||
|             name="distance_tolerance_km", | ||||
|             field=models.PositiveIntegerField(default=50), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name="geoippolicy", | ||||
|             name="history_login_count", | ||||
|             field=models.PositiveIntegerField(default=5), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name="geoippolicy", | ||||
|             name="history_max_distance_km", | ||||
|             field=models.PositiveBigIntegerField(default=100), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name="geoippolicy", | ||||
|             name="impossible_tolerance_km", | ||||
|             field=models.PositiveIntegerField(default=100), | ||||
|         ), | ||||
|     ] | ||||
| @ -4,21 +4,15 @@ from itertools import chain | ||||
|  | ||||
| from django.contrib.postgres.fields import ArrayField | ||||
| from django.db import models | ||||
| from django.utils.timezone import now | ||||
| from django.utils.translation import gettext as _ | ||||
| from django_countries.fields import CountryField | ||||
| from geopy import distance | ||||
| from rest_framework.serializers import BaseSerializer | ||||
|  | ||||
| from authentik.events.context_processors.geoip import GeoIPDict | ||||
| from authentik.events.models import Event, EventAction | ||||
| from authentik.policies.exceptions import PolicyException | ||||
| from authentik.policies.geoip.exceptions import GeoIPNotFoundException | ||||
| from authentik.policies.models import Policy | ||||
| from authentik.policies.types import PolicyRequest, PolicyResult | ||||
|  | ||||
| MAX_DISTANCE_HOUR_KM = 1000 | ||||
|  | ||||
|  | ||||
| class GeoIPPolicy(Policy): | ||||
|     """Ensure the user satisfies requirements of geography or network topology, based on IP | ||||
| @ -27,15 +21,6 @@ class GeoIPPolicy(Policy): | ||||
|     asns = ArrayField(models.IntegerField(), blank=True, default=list) | ||||
|     countries = CountryField(multiple=True, blank=True) | ||||
|  | ||||
|     distance_tolerance_km = models.PositiveIntegerField(default=50) | ||||
|  | ||||
|     check_history_distance = models.BooleanField(default=False) | ||||
|     history_max_distance_km = models.PositiveBigIntegerField(default=100) | ||||
|     history_login_count = models.PositiveIntegerField(default=5) | ||||
|  | ||||
|     check_impossible_travel = models.BooleanField(default=False) | ||||
|     impossible_tolerance_km = models.PositiveIntegerField(default=100) | ||||
|  | ||||
|     @property | ||||
|     def serializer(self) -> type[BaseSerializer]: | ||||
|         from authentik.policies.geoip.api import GeoIPPolicySerializer | ||||
| @ -52,27 +37,21 @@ class GeoIPPolicy(Policy): | ||||
|         - the client IP is advertised by an autonomous system with ASN in the `asns` | ||||
|         - the client IP is geolocated in a country of `countries` | ||||
|         """ | ||||
|         static_results: list[PolicyResult] = [] | ||||
|         dynamic_results: list[PolicyResult] = [] | ||||
|         results: list[PolicyResult] = [] | ||||
|  | ||||
|         if self.asns: | ||||
|             static_results.append(self.passes_asn(request)) | ||||
|             results.append(self.passes_asn(request)) | ||||
|         if self.countries: | ||||
|             static_results.append(self.passes_country(request)) | ||||
|             results.append(self.passes_country(request)) | ||||
|  | ||||
|         if self.check_history_distance or self.check_impossible_travel: | ||||
|             dynamic_results.append(self.passes_distance(request)) | ||||
|  | ||||
|         if not static_results and not dynamic_results: | ||||
|         if not results: | ||||
|             return PolicyResult(True) | ||||
|  | ||||
|         passing = any(r.passing for r in static_results) and all(r.passing for r in dynamic_results) | ||||
|         messages = chain( | ||||
|             *[r.messages for r in static_results], *[r.messages for r in dynamic_results] | ||||
|         ) | ||||
|         passing = any(r.passing for r in results) | ||||
|         messages = chain(*[r.messages for r in results]) | ||||
|  | ||||
|         result = PolicyResult(passing, *messages) | ||||
|         result.source_results = list(chain(static_results, dynamic_results)) | ||||
|         result.source_results = results | ||||
|  | ||||
|         return result | ||||
|  | ||||
| @ -94,7 +73,7 @@ class GeoIPPolicy(Policy): | ||||
|  | ||||
|     def passes_country(self, request: PolicyRequest) -> PolicyResult: | ||||
|         # This is not a single get chain because `request.context` can contain `{ "geoip": None }`. | ||||
|         geoip_data: GeoIPDict | None = request.context.get("geoip") | ||||
|         geoip_data = request.context.get("geoip") | ||||
|         country = geoip_data.get("country") if geoip_data else None | ||||
|  | ||||
|         if not country: | ||||
| @ -108,42 +87,6 @@ class GeoIPPolicy(Policy): | ||||
|  | ||||
|         return PolicyResult(True) | ||||
|  | ||||
|     def passes_distance(self, request: PolicyRequest) -> PolicyResult: | ||||
|         """Check if current policy execution is out of distance range compared | ||||
|         to previous authentication requests""" | ||||
|         # Get previous login event and GeoIP data | ||||
|         previous_logins = Event.objects.filter( | ||||
|             action=EventAction.LOGIN, user__pk=request.user.pk, context__geo__isnull=False | ||||
|         ).order_by("-created")[: self.history_login_count] | ||||
|         _now = now() | ||||
|         geoip_data: GeoIPDict | None = request.context.get("geoip") | ||||
|         if not geoip_data: | ||||
|             return PolicyResult(False) | ||||
|         for previous_login in previous_logins: | ||||
|             previous_login_geoip: GeoIPDict = previous_login.context["geo"] | ||||
|  | ||||
|             # Figure out distance | ||||
|             dist = distance.geodesic( | ||||
|                 (previous_login_geoip["lat"], previous_login_geoip["long"]), | ||||
|                 (geoip_data["lat"], geoip_data["long"]), | ||||
|             ) | ||||
|             if self.check_history_distance and dist.km >= ( | ||||
|                 self.history_max_distance_km + self.distance_tolerance_km | ||||
|             ): | ||||
|                 return PolicyResult( | ||||
|                     False, _("Distance from previous authentication is larger than threshold.") | ||||
|                 ) | ||||
|             # Check if distance between `previous_login` and now is more | ||||
|             # than max distance per hour times the amount of hours since the previous login | ||||
|             # (round down to the lowest closest time of hours) | ||||
|             # clamped to be at least 1 hour | ||||
|             rel_time_hours = max(int((_now - previous_login.created).total_seconds() / 3600), 1) | ||||
|             if self.check_impossible_travel and dist.km >= ( | ||||
|                 (MAX_DISTANCE_HOUR_KM * rel_time_hours) + self.distance_tolerance_km | ||||
|             ): | ||||
|                 return PolicyResult(False, _("Distance is further than possible.")) | ||||
|         return PolicyResult(True) | ||||
|  | ||||
|     class Meta(Policy.PolicyMeta): | ||||
|         verbose_name = _("GeoIP Policy") | ||||
|         verbose_name_plural = _("GeoIP Policies") | ||||
|  | ||||
| @ -1,10 +1,8 @@ | ||||
| """geoip policy tests""" | ||||
|  | ||||
| from django.test import TestCase | ||||
| from guardian.shortcuts import get_anonymous_user | ||||
|  | ||||
| from authentik.core.tests.utils import create_test_user | ||||
| from authentik.events.models import Event, EventAction | ||||
| from authentik.events.utils import get_user | ||||
| from authentik.policies.engine import PolicyRequest, PolicyResult | ||||
| from authentik.policies.exceptions import PolicyException | ||||
| from authentik.policies.geoip.exceptions import GeoIPNotFoundException | ||||
| @ -16,8 +14,8 @@ class TestGeoIPPolicy(TestCase): | ||||
|  | ||||
|     def setUp(self): | ||||
|         super().setUp() | ||||
|         self.user = create_test_user() | ||||
|         self.request = PolicyRequest(self.user) | ||||
|  | ||||
|         self.request = PolicyRequest(get_anonymous_user()) | ||||
|  | ||||
|         self.context_disabled_geoip = {} | ||||
|         self.context_unknown_ip = {"asn": None, "geoip": None} | ||||
| @ -128,70 +126,3 @@ class TestGeoIPPolicy(TestCase): | ||||
|         result: PolicyResult = policy.passes(self.request) | ||||
|  | ||||
|         self.assertTrue(result.passing) | ||||
|  | ||||
|     def test_history(self): | ||||
|         """Test history checks""" | ||||
|         Event.objects.create( | ||||
|             action=EventAction.LOGIN, | ||||
|             user=get_user(self.user), | ||||
|             context={ | ||||
|                 # Random location in Canada | ||||
|                 "geo": {"lat": 55.868351, "long": -104.441011}, | ||||
|             }, | ||||
|         ) | ||||
|         # Random location in Poland | ||||
|         self.request.context["geoip"] = {"lat": 50.950613, "long": 20.363679} | ||||
|  | ||||
|         policy = GeoIPPolicy.objects.create(check_history_distance=True) | ||||
|  | ||||
|         result: PolicyResult = policy.passes(self.request) | ||||
|         self.assertFalse(result.passing) | ||||
|  | ||||
|     def test_history_no_data(self): | ||||
|         """Test history checks (with no geoip data in context)""" | ||||
|         Event.objects.create( | ||||
|             action=EventAction.LOGIN, | ||||
|             user=get_user(self.user), | ||||
|             context={ | ||||
|                 # Random location in Canada | ||||
|                 "geo": {"lat": 55.868351, "long": -104.441011}, | ||||
|             }, | ||||
|         ) | ||||
|  | ||||
|         policy = GeoIPPolicy.objects.create(check_history_distance=True) | ||||
|  | ||||
|         result: PolicyResult = policy.passes(self.request) | ||||
|         self.assertFalse(result.passing) | ||||
|  | ||||
|     def test_history_impossible_travel(self): | ||||
|         """Test history checks""" | ||||
|         Event.objects.create( | ||||
|             action=EventAction.LOGIN, | ||||
|             user=get_user(self.user), | ||||
|             context={ | ||||
|                 # Random location in Canada | ||||
|                 "geo": {"lat": 55.868351, "long": -104.441011}, | ||||
|             }, | ||||
|         ) | ||||
|         # Random location in Poland | ||||
|         self.request.context["geoip"] = {"lat": 50.950613, "long": 20.363679} | ||||
|  | ||||
|         policy = GeoIPPolicy.objects.create(check_impossible_travel=True) | ||||
|  | ||||
|         result: PolicyResult = policy.passes(self.request) | ||||
|         self.assertFalse(result.passing) | ||||
|  | ||||
|     def test_history_no_geoip(self): | ||||
|         """Test history checks (previous login with no geoip data)""" | ||||
|         Event.objects.create( | ||||
|             action=EventAction.LOGIN, | ||||
|             user=get_user(self.user), | ||||
|             context={}, | ||||
|         ) | ||||
|         # Random location in Poland | ||||
|         self.request.context["geoip"] = {"lat": 50.950613, "long": 20.363679} | ||||
|  | ||||
|         policy = GeoIPPolicy.objects.create(check_history_distance=True) | ||||
|  | ||||
|         result: PolicyResult = policy.passes(self.request) | ||||
|         self.assertFalse(result.passing) | ||||
|  | ||||
| @ -148,10 +148,10 @@ class PasswordPolicy(Policy): | ||||
|             user_inputs.append(request.user.email) | ||||
|         if request.http_request: | ||||
|             user_inputs.append(request.http_request.brand.branding_title) | ||||
|         # Only calculate result for the first 72 characters, as with over 100 char | ||||
|         # Only calculate result for the first 100 characters, as with over 100 char | ||||
|         # long passwords we can be reasonably sure that they'll surpass the score anyways | ||||
|         # See https://github.com/dropbox/zxcvbn#runtime-latency | ||||
|         results = zxcvbn(password[:72], user_inputs) | ||||
|         results = zxcvbn(password[:100], user_inputs) | ||||
|         LOGGER.debug("password failed", check="zxcvbn", score=results["score"]) | ||||
|         result = PolicyResult(results["score"] > self.zxcvbn_score_threshold) | ||||
|         if not result.passing: | ||||
|  | ||||
| @ -281,6 +281,7 @@ class OAuth2Provider(WebfingerProvider, Provider): | ||||
|                 }, | ||||
|             ) | ||||
|             return request.build_absolute_uri(url) | ||||
|  | ||||
|         except Provider.application.RelatedObjectDoesNotExist: | ||||
|             return None | ||||
|  | ||||
|  | ||||
| @ -1,10 +1,9 @@ | ||||
| from django.contrib.auth.signals import user_logged_out | ||||
| from django.db.models.signals import post_save | ||||
| from django.dispatch import receiver | ||||
| from django.http import HttpRequest | ||||
|  | ||||
| from authentik.core.models import User | ||||
| from authentik.providers.oauth2.models import AccessToken, DeviceToken, RefreshToken | ||||
| from authentik.providers.oauth2.models import AccessToken | ||||
|  | ||||
|  | ||||
| @receiver(user_logged_out) | ||||
| @ -13,13 +12,3 @@ def user_logged_out_oauth_access_token(sender, request: HttpRequest, user: User, | ||||
|     if not request.session or not request.session.session_key: | ||||
|         return | ||||
|     AccessToken.objects.filter(user=user, session__session_key=request.session.session_key).delete() | ||||
|  | ||||
|  | ||||
@receiver(post_save, sender=User)
def user_deactivated(sender, instance: User, **_):
    """Remove user tokens when deactivated"""
    # Nothing to do while the account remains active
    if instance.is_active:
        return
    # Revoke every OAuth2 token type tied to the user's sessions
    for token_model in (AccessToken, RefreshToken, DeviceToken):
        token_model.objects.filter(session__user=instance).delete()
|  | ||||
| @ -150,7 +150,6 @@ class TestToken(OAuthTestCase): | ||||
|                 "id_token": provider.encode( | ||||
|                     access.id_token.to_dict(), | ||||
|                 ), | ||||
|                 "scope": "", | ||||
|             }, | ||||
|         ) | ||||
|         self.validate_jwt(access, provider) | ||||
| @ -243,7 +242,6 @@ class TestToken(OAuthTestCase): | ||||
|                 "id_token": provider.encode( | ||||
|                     access.id_token.to_dict(), | ||||
|                 ), | ||||
|                 "scope": "offline_access", | ||||
|             }, | ||||
|         ) | ||||
|         self.validate_jwt(access, provider) | ||||
| @ -303,7 +301,6 @@ class TestToken(OAuthTestCase): | ||||
|                 "id_token": provider.encode( | ||||
|                     access.id_token.to_dict(), | ||||
|                 ), | ||||
|                 "scope": "offline_access", | ||||
|             }, | ||||
|         ) | ||||
|  | ||||
|  | ||||
| @ -499,11 +499,11 @@ class OAuthFulfillmentStage(StageView): | ||||
|             ) | ||||
|  | ||||
|             challenge.is_valid() | ||||
|             self.executor.stage_ok() | ||||
|  | ||||
|             return HttpChallengeResponse( | ||||
|                 challenge=challenge, | ||||
|             ) | ||||
|         self.executor.stage_ok() | ||||
|  | ||||
|         return HttpResponseRedirectScheme(uri, allowed_schemes=[parsed.scheme]) | ||||
|  | ||||
|     def post(self, request: HttpRequest, *args, **kwargs) -> HttpResponse: | ||||
|  | ||||
| @ -64,8 +64,7 @@ def to_base64url_uint(val: int, min_length: int = 0) -> bytes: | ||||
| class JWKSView(View): | ||||
|     """Show RSA Key data for Provider""" | ||||
|  | ||||
|     @staticmethod | ||||
|     def get_jwk_for_key(key: CertificateKeyPair, use: str) -> dict | None: | ||||
|     def get_jwk_for_key(self, key: CertificateKeyPair, use: str) -> dict | None: | ||||
|         """Convert a certificate-key pair into JWK""" | ||||
|         private_key = key.private_key | ||||
|         key_data = None | ||||
| @ -124,12 +123,12 @@ class JWKSView(View): | ||||
|         response_data = {} | ||||
|  | ||||
|         if signing_key := provider.signing_key: | ||||
|             jwk = JWKSView.get_jwk_for_key(signing_key, "sig") | ||||
|             jwk = self.get_jwk_for_key(signing_key, "sig") | ||||
|             if jwk: | ||||
|                 response_data.setdefault("keys", []) | ||||
|                 response_data["keys"].append(jwk) | ||||
|         if encryption_key := provider.encryption_key: | ||||
|             jwk = JWKSView.get_jwk_for_key(encryption_key, "enc") | ||||
|             jwk = self.get_jwk_for_key(encryption_key, "enc") | ||||
|             if jwk: | ||||
|                 response_data.setdefault("keys", []) | ||||
|                 response_data["keys"].append(jwk) | ||||
|  | ||||
Some files were not shown because too many files have changed in this diff Show More
		Reference in New Issue
	
	Block a user
	