Compare commits
	
		
			1 Commits
		
	
	
		
			rfc8414
			...
			tests/e2e/
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
| 4b0d641a51 | 
| @ -1,5 +1,5 @@ | |||||||
| [bumpversion] | [bumpversion] | ||||||
| current_version = 2025.6.0 | current_version = 2025.4.1 | ||||||
| tag = True | tag = True | ||||||
| commit = True | commit = True | ||||||
| parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))? | parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))? | ||||||
|  | |||||||
							
								
								
									
										2
									
								
								.github/actions/setup/action.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										2
									
								
								.github/actions/setup/action.yml
									
									
									
									
										vendored
									
									
								
							| @ -36,7 +36,7 @@ runs: | |||||||
|       with: |       with: | ||||||
|         go-version-file: "go.mod" |         go-version-file: "go.mod" | ||||||
|     - name: Setup docker cache |     - name: Setup docker cache | ||||||
|       uses: AndreKurait/docker-cache@0fe76702a40db986d9663c24954fc14c6a6031b7 |       uses: ScribeMD/docker-cache@0.5.0 | ||||||
|       with: |       with: | ||||||
|         key: docker-images-${{ runner.os }}-${{ hashFiles('.github/actions/setup/docker-compose.yml', 'Makefile') }}-${{ inputs.postgresql_version }} |         key: docker-images-${{ runner.os }}-${{ hashFiles('.github/actions/setup/docker-compose.yml', 'Makefile') }}-${{ inputs.postgresql_version }} | ||||||
|     - name: Setup dependencies |     - name: Setup dependencies | ||||||
|  | |||||||
							
								
								
									
										14
									
								
								.github/dependabot.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										14
									
								
								.github/dependabot.yml
									
									
									
									
										vendored
									
									
								
							| @ -23,13 +23,7 @@ updates: | |||||||
|   - package-ecosystem: npm |   - package-ecosystem: npm | ||||||
|     directories: |     directories: | ||||||
|       - "/web" |       - "/web" | ||||||
|       - "/web/packages/sfe" |       - "/web/sfe" | ||||||
|       - "/web/packages/core" |  | ||||||
|       - "/web/packages/esbuild-plugin-live-reload" |  | ||||||
|       - "/packages/prettier-config" |  | ||||||
|       - "/packages/tsconfig" |  | ||||||
|       - "/packages/docusaurus-config" |  | ||||||
|       - "/packages/eslint-config" |  | ||||||
|     schedule: |     schedule: | ||||||
|       interval: daily |       interval: daily | ||||||
|       time: "04:00" |       time: "04:00" | ||||||
| @ -74,9 +68,6 @@ updates: | |||||||
|       wdio: |       wdio: | ||||||
|         patterns: |         patterns: | ||||||
|           - "@wdio/*" |           - "@wdio/*" | ||||||
|       goauthentik: |  | ||||||
|         patterns: |  | ||||||
|           - "@goauthentik/*" |  | ||||||
|   - package-ecosystem: npm |   - package-ecosystem: npm | ||||||
|     directory: "/website" |     directory: "/website" | ||||||
|     schedule: |     schedule: | ||||||
| @ -97,9 +88,6 @@ updates: | |||||||
|           - "swc-*" |           - "swc-*" | ||||||
|           - "lightningcss*" |           - "lightningcss*" | ||||||
|           - "@rspack/binding*" |           - "@rspack/binding*" | ||||||
|       goauthentik: |  | ||||||
|         patterns: |  | ||||||
|           - "@goauthentik/*" |  | ||||||
|   - package-ecosystem: npm |   - package-ecosystem: npm | ||||||
|     directory: "/lifecycle/aws" |     directory: "/lifecycle/aws" | ||||||
|     schedule: |     schedule: | ||||||
|  | |||||||
							
								
								
									
										1
									
								
								.github/workflows/api-ts-publish.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										1
									
								
								.github/workflows/api-ts-publish.yml
									
									
									
									
										vendored
									
									
								
							| @ -53,7 +53,6 @@ jobs: | |||||||
|           signoff: true |           signoff: true | ||||||
|           # ID from https://api.github.com/users/authentik-automation[bot] |           # ID from https://api.github.com/users/authentik-automation[bot] | ||||||
|           author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com> |           author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com> | ||||||
|           labels: dependencies |  | ||||||
|       - uses: peter-evans/enable-pull-request-automerge@v3 |       - uses: peter-evans/enable-pull-request-automerge@v3 | ||||||
|         with: |         with: | ||||||
|           token: ${{ steps.generate_token.outputs.token }} |           token: ${{ steps.generate_token.outputs.token }} | ||||||
|  | |||||||
							
								
								
									
										2
									
								
								.github/workflows/ci-main.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										2
									
								
								.github/workflows/ci-main.yml
									
									
									
									
										vendored
									
									
								
							| @ -62,7 +62,6 @@ jobs: | |||||||
|         psql: |         psql: | ||||||
|           - 15-alpine |           - 15-alpine | ||||||
|           - 16-alpine |           - 16-alpine | ||||||
|           - 17-alpine |  | ||||||
|         run_id: [1, 2, 3, 4, 5] |         run_id: [1, 2, 3, 4, 5] | ||||||
|     steps: |     steps: | ||||||
|       - uses: actions/checkout@v4 |       - uses: actions/checkout@v4 | ||||||
| @ -117,7 +116,6 @@ jobs: | |||||||
|         psql: |         psql: | ||||||
|           - 15-alpine |           - 15-alpine | ||||||
|           - 16-alpine |           - 16-alpine | ||||||
|           - 17-alpine |  | ||||||
|         run_id: [1, 2, 3, 4, 5] |         run_id: [1, 2, 3, 4, 5] | ||||||
|     steps: |     steps: | ||||||
|       - uses: actions/checkout@v4 |       - uses: actions/checkout@v4 | ||||||
|  | |||||||
| @ -37,7 +37,6 @@ jobs: | |||||||
|           signoff: true |           signoff: true | ||||||
|           # ID from https://api.github.com/users/authentik-automation[bot] |           # ID from https://api.github.com/users/authentik-automation[bot] | ||||||
|           author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com> |           author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com> | ||||||
|           labels: dependencies |  | ||||||
|       - uses: peter-evans/enable-pull-request-automerge@v3 |       - uses: peter-evans/enable-pull-request-automerge@v3 | ||||||
|         with: |         with: | ||||||
|           token: ${{ steps.generate_token.outputs.token }} |           token: ${{ steps.generate_token.outputs.token }} | ||||||
|  | |||||||
							
								
								
									
										1
									
								
								.github/workflows/image-compress.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										1
									
								
								.github/workflows/image-compress.yml
									
									
									
									
										vendored
									
									
								
							| @ -53,7 +53,6 @@ jobs: | |||||||
|           body: ${{ steps.compress.outputs.markdown }} |           body: ${{ steps.compress.outputs.markdown }} | ||||||
|           delete-branch: true |           delete-branch: true | ||||||
|           signoff: true |           signoff: true | ||||||
|           labels: dependencies |  | ||||||
|       - uses: peter-evans/enable-pull-request-automerge@v3 |       - uses: peter-evans/enable-pull-request-automerge@v3 | ||||||
|         if: "${{ github.event_name != 'pull_request' && steps.compress.outputs.markdown != '' }}" |         if: "${{ github.event_name != 'pull_request' && steps.compress.outputs.markdown != '' }}" | ||||||
|         with: |         with: | ||||||
|  | |||||||
							
								
								
									
										16
									
								
								.github/workflows/packages-npm-publish.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										16
									
								
								.github/workflows/packages-npm-publish.yml
									
									
									
									
										vendored
									
									
								
							| @ -7,7 +7,6 @@ on: | |||||||
|       - packages/eslint-config/** |       - packages/eslint-config/** | ||||||
|       - packages/prettier-config/** |       - packages/prettier-config/** | ||||||
|       - packages/tsconfig/** |       - packages/tsconfig/** | ||||||
|       - web/packages/esbuild-plugin-live-reload/** |  | ||||||
|   workflow_dispatch: |   workflow_dispatch: | ||||||
| jobs: | jobs: | ||||||
|   publish: |   publish: | ||||||
| @ -17,28 +16,27 @@ jobs: | |||||||
|       fail-fast: false |       fail-fast: false | ||||||
|       matrix: |       matrix: | ||||||
|         package: |         package: | ||||||
|           - packages/docusaurus-config |           - docusaurus-config | ||||||
|           - packages/eslint-config |           - eslint-config | ||||||
|           - packages/prettier-config |           - prettier-config | ||||||
|           - packages/tsconfig |           - tsconfig | ||||||
|           - web/packages/esbuild-plugin-live-reload |  | ||||||
|     steps: |     steps: | ||||||
|       - uses: actions/checkout@v4 |       - uses: actions/checkout@v4 | ||||||
|         with: |         with: | ||||||
|           fetch-depth: 2 |           fetch-depth: 2 | ||||||
|       - uses: actions/setup-node@v4 |       - uses: actions/setup-node@v4 | ||||||
|         with: |         with: | ||||||
|           node-version-file: ${{ matrix.package }}/package.json |           node-version-file: packages/${{ matrix.package }}/package.json | ||||||
|           registry-url: "https://registry.npmjs.org" |           registry-url: "https://registry.npmjs.org" | ||||||
|       - name: Get changed files |       - name: Get changed files | ||||||
|         id: changed-files |         id: changed-files | ||||||
|         uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c |         uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c | ||||||
|         with: |         with: | ||||||
|           files: | |           files: | | ||||||
|             ${{ matrix.package }}/package.json |             packages/${{ matrix.package }}/package.json | ||||||
|       - name: Publish package |       - name: Publish package | ||||||
|         if: steps.changed-files.outputs.any_changed == 'true' |         if: steps.changed-files.outputs.any_changed == 'true' | ||||||
|         working-directory: ${{ matrix.package }} |         working-directory: packages/${{ matrix.package}} | ||||||
|         run: | |         run: | | ||||||
|           npm ci |           npm ci | ||||||
|           npm run build |           npm run build | ||||||
|  | |||||||
| @ -52,6 +52,3 @@ jobs: | |||||||
|           body: "core, web: update translations" |           body: "core, web: update translations" | ||||||
|           delete-branch: true |           delete-branch: true | ||||||
|           signoff: true |           signoff: true | ||||||
|           labels: dependencies |  | ||||||
|           # ID from https://api.github.com/users/authentik-automation[bot] |  | ||||||
|           author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com> |  | ||||||
|  | |||||||
							
								
								
									
										15
									
								
								.github/workflows/translation-rename.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										15
									
								
								.github/workflows/translation-rename.yml
									
									
									
									
										vendored
									
									
								
							| @ -15,7 +15,6 @@ jobs: | |||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
|     if: ${{ github.event.pull_request.user.login == 'transifex-integration[bot]'}} |     if: ${{ github.event.pull_request.user.login == 'transifex-integration[bot]'}} | ||||||
|     steps: |     steps: | ||||||
|       - uses: actions/checkout@v4 |  | ||||||
|       - id: generate_token |       - id: generate_token | ||||||
|         uses: tibdex/github-app-token@v2 |         uses: tibdex/github-app-token@v2 | ||||||
|         with: |         with: | ||||||
| @ -26,13 +25,23 @@ jobs: | |||||||
|         env: |         env: | ||||||
|           GH_TOKEN: ${{ steps.generate_token.outputs.token }} |           GH_TOKEN: ${{ steps.generate_token.outputs.token }} | ||||||
|         run: | |         run: | | ||||||
|           title=$(gh pr view ${{ github.event.pull_request.number }} --json  "title" -q ".title") |           title=$(curl -q -L \ | ||||||
|  |             -H "Accept: application/vnd.github+json" \ | ||||||
|  |             -H "Authorization: Bearer ${GH_TOKEN}" \ | ||||||
|  |             -H "X-GitHub-Api-Version: 2022-11-28" \ | ||||||
|  |             https://api.github.com/repos/${GITHUB_REPOSITORY}/pulls/${{ github.event.pull_request.number }} | jq -r .title) | ||||||
|           echo "title=${title}" >> "$GITHUB_OUTPUT" |           echo "title=${title}" >> "$GITHUB_OUTPUT" | ||||||
|       - name: Rename |       - name: Rename | ||||||
|         env: |         env: | ||||||
|           GH_TOKEN: ${{ steps.generate_token.outputs.token }} |           GH_TOKEN: ${{ steps.generate_token.outputs.token }} | ||||||
|         run: | |         run: | | ||||||
|           gh pr edit ${{ github.event.pull_request.number }} -t "translate: ${{ steps.title.outputs.title }}" --add-label dependencies |           curl -L \ | ||||||
|  |             -X PATCH \ | ||||||
|  |             -H "Accept: application/vnd.github+json" \ | ||||||
|  |             -H "Authorization: Bearer ${GH_TOKEN}" \ | ||||||
|  |             -H "X-GitHub-Api-Version: 2022-11-28" \ | ||||||
|  |             https://api.github.com/repos/${GITHUB_REPOSITORY}/pulls/${{ github.event.pull_request.number }} \ | ||||||
|  |             -d "{\"title\":\"translate: ${{ steps.title.outputs.title }}\"}" | ||||||
|       - uses: peter-evans/enable-pull-request-automerge@v3 |       - uses: peter-evans/enable-pull-request-automerge@v3 | ||||||
|         with: |         with: | ||||||
|           token: ${{ steps.generate_token.outputs.token }} |           token: ${{ steps.generate_token.outputs.token }} | ||||||
|  | |||||||
| @ -1,7 +1,7 @@ | |||||||
| # syntax=docker/dockerfile:1 | # syntax=docker/dockerfile:1 | ||||||
|  |  | ||||||
| # Stage 1: Build website | # Stage 1: Build website | ||||||
| FROM --platform=${BUILDPLATFORM} docker.io/library/node:24 AS website-builder | FROM --platform=${BUILDPLATFORM} docker.io/library/node:22 AS website-builder | ||||||
|  |  | ||||||
| ENV NODE_ENV=production | ENV NODE_ENV=production | ||||||
|  |  | ||||||
| @ -20,7 +20,7 @@ COPY ./SECURITY.md /work/ | |||||||
| RUN npm run build-bundled | RUN npm run build-bundled | ||||||
|  |  | ||||||
| # Stage 2: Build webui | # Stage 2: Build webui | ||||||
| FROM --platform=${BUILDPLATFORM} docker.io/library/node:24 AS web-builder | FROM --platform=${BUILDPLATFORM} docker.io/library/node:22 AS web-builder | ||||||
|  |  | ||||||
| ARG GIT_BUILD_HASH | ARG GIT_BUILD_HASH | ||||||
| ENV GIT_BUILD_HASH=$GIT_BUILD_HASH | ENV GIT_BUILD_HASH=$GIT_BUILD_HASH | ||||||
| @ -94,7 +94,7 @@ RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \ | |||||||
|     /bin/sh -c "GEOIPUPDATE_LICENSE_KEY_FILE=/run/secrets/GEOIPUPDATE_LICENSE_KEY /usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0" |     /bin/sh -c "GEOIPUPDATE_LICENSE_KEY_FILE=/run/secrets/GEOIPUPDATE_LICENSE_KEY /usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0" | ||||||
|  |  | ||||||
| # Stage 5: Download uv | # Stage 5: Download uv | ||||||
| FROM ghcr.io/astral-sh/uv:0.7.10 AS uv | FROM ghcr.io/astral-sh/uv:0.7.4 AS uv | ||||||
| # Stage 6: Base python image | # Stage 6: Base python image | ||||||
| FROM ghcr.io/goauthentik/fips-python:3.13.3-slim-bookworm-fips AS python-base | FROM ghcr.io/goauthentik/fips-python:3.13.3-slim-bookworm-fips AS python-base | ||||||
|  |  | ||||||
|  | |||||||
							
								
								
									
										51
									
								
								Makefile
									
									
									
									
									
								
							
							
						
						
									
										51
									
								
								Makefile
									
									
									
									
									
								
							| @ -1,7 +1,6 @@ | |||||||
| .PHONY: gen dev-reset all clean test web website | .PHONY: gen dev-reset all clean test web website | ||||||
|  |  | ||||||
| SHELL := /usr/bin/env bash | .SHELLFLAGS += ${SHELLFLAGS} -e | ||||||
| .SHELLFLAGS += ${SHELLFLAGS} -e -o pipefail |  | ||||||
| PWD = $(shell pwd) | PWD = $(shell pwd) | ||||||
| UID = $(shell id -u) | UID = $(shell id -u) | ||||||
| GID = $(shell id -g) | GID = $(shell id -g) | ||||||
| @ -9,9 +8,9 @@ NPM_VERSION = $(shell python -m scripts.generate_semver) | |||||||
| PY_SOURCES = authentik tests scripts lifecycle .github | PY_SOURCES = authentik tests scripts lifecycle .github | ||||||
| DOCKER_IMAGE ?= "authentik:test" | DOCKER_IMAGE ?= "authentik:test" | ||||||
|  |  | ||||||
| GEN_API_TS = gen-ts-api | GEN_API_TS = "gen-ts-api" | ||||||
| GEN_API_PY = gen-py-api | GEN_API_PY = "gen-py-api" | ||||||
| GEN_API_GO = gen-go-api | GEN_API_GO = "gen-go-api" | ||||||
|  |  | ||||||
| pg_user := $(shell uv run python -m authentik.lib.config postgresql.user 2>/dev/null) | pg_user := $(shell uv run python -m authentik.lib.config postgresql.user 2>/dev/null) | ||||||
| pg_host := $(shell uv run python -m authentik.lib.config postgresql.host 2>/dev/null) | pg_host := $(shell uv run python -m authentik.lib.config postgresql.host 2>/dev/null) | ||||||
| @ -118,19 +117,14 @@ gen-diff:  ## (Release) generate the changelog diff between the current schema a | |||||||
| 	npx prettier --write diff.md | 	npx prettier --write diff.md | ||||||
|  |  | ||||||
| gen-clean-ts:  ## Remove generated API client for Typescript | gen-clean-ts:  ## Remove generated API client for Typescript | ||||||
| 	rm -rf ${PWD}/${GEN_API_TS}/ | 	rm -rf ./${GEN_API_TS}/ | ||||||
| 	rm -rf ${PWD}/web/node_modules/@goauthentik/api/ | 	rm -rf ./web/node_modules/@goauthentik/api/ | ||||||
|  |  | ||||||
| gen-clean-go:  ## Remove generated API client for Go | gen-clean-go:  ## Remove generated API client for Go | ||||||
| 	mkdir -p ${PWD}/${GEN_API_GO} | 	rm -rf ./${GEN_API_GO}/ | ||||||
| ifneq ($(wildcard ${PWD}/${GEN_API_GO}/.*),) |  | ||||||
| 	make -C ${PWD}/${GEN_API_GO} clean |  | ||||||
| else |  | ||||||
| 	rm -rf ${PWD}/${GEN_API_GO} |  | ||||||
| endif |  | ||||||
|  |  | ||||||
| gen-clean-py:  ## Remove generated API client for Python | gen-clean-py:  ## Remove generated API client for Python | ||||||
| 	rm -rf ${PWD}/${GEN_API_PY}/ | 	rm -rf ./${GEN_API_PY}/ | ||||||
|  |  | ||||||
| gen-clean: gen-clean-ts gen-clean-go gen-clean-py  ## Remove generated API clients | gen-clean: gen-clean-ts gen-clean-go gen-clean-py  ## Remove generated API clients | ||||||
|  |  | ||||||
| @ -147,8 +141,8 @@ gen-client-ts: gen-clean-ts  ## Build and install the authentik API for Typescri | |||||||
| 		--git-repo-id authentik \ | 		--git-repo-id authentik \ | ||||||
| 		--git-user-id goauthentik | 		--git-user-id goauthentik | ||||||
| 	mkdir -p web/node_modules/@goauthentik/api | 	mkdir -p web/node_modules/@goauthentik/api | ||||||
| 	cd ${PWD}/${GEN_API_TS} && npm i | 	cd ./${GEN_API_TS} && npm i | ||||||
| 	\cp -rf ${PWD}/${GEN_API_TS}/* web/node_modules/@goauthentik/api | 	\cp -rf ./${GEN_API_TS}/* web/node_modules/@goauthentik/api | ||||||
|  |  | ||||||
| gen-client-py: gen-clean-py ## Build and install the authentik API for Python | gen-client-py: gen-clean-py ## Build and install the authentik API for Python | ||||||
| 	docker run \ | 	docker run \ | ||||||
| @ -162,17 +156,24 @@ gen-client-py: gen-clean-py ## Build and install the authentik API for Python | |||||||
| 		--additional-properties=packageVersion=${NPM_VERSION} \ | 		--additional-properties=packageVersion=${NPM_VERSION} \ | ||||||
| 		--git-repo-id authentik \ | 		--git-repo-id authentik \ | ||||||
| 		--git-user-id goauthentik | 		--git-user-id goauthentik | ||||||
|  | 	pip install ./${GEN_API_PY} | ||||||
|  |  | ||||||
| gen-client-go: gen-clean-go  ## Build and install the authentik API for Golang | gen-client-go: gen-clean-go  ## Build and install the authentik API for Golang | ||||||
| 	mkdir -p ${PWD}/${GEN_API_GO} | 	mkdir -p ./${GEN_API_GO} ./${GEN_API_GO}/templates | ||||||
| ifeq ($(wildcard ${PWD}/${GEN_API_GO}/.*),) | 	wget https://raw.githubusercontent.com/goauthentik/client-go/main/config.yaml -O ./${GEN_API_GO}/config.yaml | ||||||
| 	git clone --depth 1 https://github.com/goauthentik/client-go.git ${PWD}/${GEN_API_GO} | 	wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/README.mustache -O ./${GEN_API_GO}/templates/README.mustache | ||||||
| else | 	wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/go.mod.mustache -O ./${GEN_API_GO}/templates/go.mod.mustache | ||||||
| 	cd ${PWD}/${GEN_API_GO} && git pull | 	cp schema.yml ./${GEN_API_GO}/ | ||||||
| endif | 	docker run \ | ||||||
| 	cp ${PWD}/schema.yml ${PWD}/${GEN_API_GO} | 		--rm -v ${PWD}/${GEN_API_GO}:/local \ | ||||||
| 	make -C ${PWD}/${GEN_API_GO} build | 		--user ${UID}:${GID} \ | ||||||
|  | 		docker.io/openapitools/openapi-generator-cli:v6.5.0 generate \ | ||||||
|  | 		-i /local/schema.yml \ | ||||||
|  | 		-g go \ | ||||||
|  | 		-o /local/ \ | ||||||
|  | 		-c /local/config.yaml | ||||||
| 	go mod edit -replace goauthentik.io/api/v3=./${GEN_API_GO} | 	go mod edit -replace goauthentik.io/api/v3=./${GEN_API_GO} | ||||||
|  | 	rm -rf ./${GEN_API_GO}/config.yaml ./${GEN_API_GO}/templates/ | ||||||
|  |  | ||||||
| gen-dev-config:  ## Generate a local development config file | gen-dev-config:  ## Generate a local development config file | ||||||
| 	uv run scripts/generate_config.py | 	uv run scripts/generate_config.py | ||||||
| @ -243,7 +244,7 @@ docker:  ## Build a docker image of the current source tree | |||||||
| 	DOCKER_BUILDKIT=1 docker build . --progress plain --tag ${DOCKER_IMAGE} | 	DOCKER_BUILDKIT=1 docker build . --progress plain --tag ${DOCKER_IMAGE} | ||||||
|  |  | ||||||
| test-docker: | test-docker: | ||||||
| 	BUILD=true ${PWD}/scripts/test_docker.sh | 	BUILD=true ./scripts/test_docker.sh | ||||||
|  |  | ||||||
| ######################### | ######################### | ||||||
| ## CI | ## CI | ||||||
|  | |||||||
| @ -20,8 +20,8 @@ Even if the issue is not a CVE, we still greatly appreciate your help in hardeni | |||||||
|  |  | ||||||
| | Version   | Supported | | | Version   | Supported | | ||||||
| | --------- | --------- | | | --------- | --------- | | ||||||
|  | | 2025.2.x  | ✅        | | ||||||
| | 2025.4.x  | ✅        | | | 2025.4.x  | ✅        | | ||||||
| | 2025.6.x  | ✅        | |  | ||||||
|  |  | ||||||
| ## Reporting a Vulnerability | ## Reporting a Vulnerability | ||||||
|  |  | ||||||
|  | |||||||
| @ -2,7 +2,7 @@ | |||||||
|  |  | ||||||
| from os import environ | from os import environ | ||||||
|  |  | ||||||
| __version__ = "2025.6.0" | __version__ = "2025.4.1" | ||||||
| ENV_GIT_HASH_KEY = "GIT_BUILD_HASH" | ENV_GIT_HASH_KEY = "GIT_BUILD_HASH" | ||||||
|  |  | ||||||
|  |  | ||||||
|  | |||||||
| @ -1,12 +1,9 @@ | |||||||
| """API Authentication""" | """API Authentication""" | ||||||
|  |  | ||||||
| from hmac import compare_digest | from hmac import compare_digest | ||||||
| from pathlib import Path |  | ||||||
| from tempfile import gettempdir |  | ||||||
| from typing import Any | from typing import Any | ||||||
|  |  | ||||||
| from django.conf import settings | from django.conf import settings | ||||||
| from django.contrib.auth.models import AnonymousUser |  | ||||||
| from drf_spectacular.extensions import OpenApiAuthenticationExtension | from drf_spectacular.extensions import OpenApiAuthenticationExtension | ||||||
| from rest_framework.authentication import BaseAuthentication, get_authorization_header | from rest_framework.authentication import BaseAuthentication, get_authorization_header | ||||||
| from rest_framework.exceptions import AuthenticationFailed | from rest_framework.exceptions import AuthenticationFailed | ||||||
| @ -14,17 +11,11 @@ from rest_framework.request import Request | |||||||
| from structlog.stdlib import get_logger | from structlog.stdlib import get_logger | ||||||
|  |  | ||||||
| from authentik.core.middleware import CTX_AUTH_VIA | from authentik.core.middleware import CTX_AUTH_VIA | ||||||
| from authentik.core.models import Token, TokenIntents, User, UserTypes | from authentik.core.models import Token, TokenIntents, User | ||||||
| from authentik.outposts.models import Outpost | from authentik.outposts.models import Outpost | ||||||
| from authentik.providers.oauth2.constants import SCOPE_AUTHENTIK_API | from authentik.providers.oauth2.constants import SCOPE_AUTHENTIK_API | ||||||
|  |  | ||||||
| LOGGER = get_logger() | LOGGER = get_logger() | ||||||
| _tmp = Path(gettempdir()) |  | ||||||
| try: |  | ||||||
|     with open(_tmp / "authentik-core-ipc.key") as _f: |  | ||||||
|         ipc_key = _f.read() |  | ||||||
| except OSError: |  | ||||||
|     ipc_key = None |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def validate_auth(header: bytes) -> str | None: | def validate_auth(header: bytes) -> str | None: | ||||||
| @ -82,11 +73,6 @@ def auth_user_lookup(raw_header: bytes) -> User | None: | |||||||
|     if user: |     if user: | ||||||
|         CTX_AUTH_VIA.set("secret_key") |         CTX_AUTH_VIA.set("secret_key") | ||||||
|         return user |         return user | ||||||
|     # then try to auth via secret key (for embedded outpost/etc) |  | ||||||
|     user = token_ipc(auth_credentials) |  | ||||||
|     if user: |  | ||||||
|         CTX_AUTH_VIA.set("ipc") |  | ||||||
|         return user |  | ||||||
|     raise AuthenticationFailed("Token invalid/expired") |     raise AuthenticationFailed("Token invalid/expired") | ||||||
|  |  | ||||||
|  |  | ||||||
| @ -104,43 +90,6 @@ def token_secret_key(value: str) -> User | None: | |||||||
|     return outpost.user |     return outpost.user | ||||||
|  |  | ||||||
|  |  | ||||||
| class IPCUser(AnonymousUser): |  | ||||||
|     """'Virtual' user for IPC communication between authentik core and the authentik router""" |  | ||||||
|  |  | ||||||
|     username = "authentik:system" |  | ||||||
|     is_active = True |  | ||||||
|     is_superuser = True |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def type(self): |  | ||||||
|         return UserTypes.INTERNAL_SERVICE_ACCOUNT |  | ||||||
|  |  | ||||||
|     def has_perm(self, perm, obj=None): |  | ||||||
|         return True |  | ||||||
|  |  | ||||||
|     def has_perms(self, perm_list, obj=None): |  | ||||||
|         return True |  | ||||||
|  |  | ||||||
|     def has_module_perms(self, module): |  | ||||||
|         return True |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def is_anonymous(self): |  | ||||||
|         return False |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def is_authenticated(self): |  | ||||||
|         return True |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def token_ipc(value: str) -> User | None: |  | ||||||
|     """Check if the token is the secret key |  | ||||||
|     and return the service account for the managed outpost""" |  | ||||||
|     if not ipc_key or not compare_digest(value, ipc_key): |  | ||||||
|         return None |  | ||||||
|     return IPCUser() |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class TokenAuthentication(BaseAuthentication): | class TokenAuthentication(BaseAuthentication): | ||||||
|     """Token-based authentication using HTTP Bearer authentication""" |     """Token-based authentication using HTTP Bearer authentication""" | ||||||
|  |  | ||||||
|  | |||||||
| @ -59,7 +59,6 @@ class BrandSerializer(ModelSerializer): | |||||||
|             "flow_device_code", |             "flow_device_code", | ||||||
|             "default_application", |             "default_application", | ||||||
|             "web_certificate", |             "web_certificate", | ||||||
|             "client_certificates", |  | ||||||
|             "attributes", |             "attributes", | ||||||
|         ] |         ] | ||||||
|         extra_kwargs = { |         extra_kwargs = { | ||||||
| @ -121,7 +120,6 @@ class BrandViewSet(UsedByMixin, ModelViewSet): | |||||||
|         "domain", |         "domain", | ||||||
|         "branding_title", |         "branding_title", | ||||||
|         "web_certificate__name", |         "web_certificate__name", | ||||||
|         "client_certificates__name", |  | ||||||
|     ] |     ] | ||||||
|     filterset_fields = [ |     filterset_fields = [ | ||||||
|         "brand_uuid", |         "brand_uuid", | ||||||
| @ -138,7 +136,6 @@ class BrandViewSet(UsedByMixin, ModelViewSet): | |||||||
|         "flow_user_settings", |         "flow_user_settings", | ||||||
|         "flow_device_code", |         "flow_device_code", | ||||||
|         "web_certificate", |         "web_certificate", | ||||||
|         "client_certificates", |  | ||||||
|     ] |     ] | ||||||
|     ordering = ["domain"] |     ordering = ["domain"] | ||||||
|  |  | ||||||
|  | |||||||
| @ -1,37 +0,0 @@ | |||||||
| # Generated by Django 5.1.9 on 2025-05-19 15:09 |  | ||||||
|  |  | ||||||
| import django.db.models.deletion |  | ||||||
| from django.db import migrations, models |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): |  | ||||||
|  |  | ||||||
|     dependencies = [ |  | ||||||
|         ("authentik_brands", "0009_brand_branding_default_flow_background"), |  | ||||||
|         ("authentik_crypto", "0004_alter_certificatekeypair_name"), |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     operations = [ |  | ||||||
|         migrations.AddField( |  | ||||||
|             model_name="brand", |  | ||||||
|             name="client_certificates", |  | ||||||
|             field=models.ManyToManyField( |  | ||||||
|                 blank=True, |  | ||||||
|                 default=None, |  | ||||||
|                 help_text="Certificates used for client authentication.", |  | ||||||
|                 to="authentik_crypto.certificatekeypair", |  | ||||||
|             ), |  | ||||||
|         ), |  | ||||||
|         migrations.AlterField( |  | ||||||
|             model_name="brand", |  | ||||||
|             name="web_certificate", |  | ||||||
|             field=models.ForeignKey( |  | ||||||
|                 default=None, |  | ||||||
|                 help_text="Web Certificate used by the authentik Core webserver.", |  | ||||||
|                 null=True, |  | ||||||
|                 on_delete=django.db.models.deletion.SET_DEFAULT, |  | ||||||
|                 related_name="+", |  | ||||||
|                 to="authentik_crypto.certificatekeypair", |  | ||||||
|             ), |  | ||||||
|         ), |  | ||||||
|     ] |  | ||||||
| @ -73,13 +73,6 @@ class Brand(SerializerModel): | |||||||
|         default=None, |         default=None, | ||||||
|         on_delete=models.SET_DEFAULT, |         on_delete=models.SET_DEFAULT, | ||||||
|         help_text=_("Web Certificate used by the authentik Core webserver."), |         help_text=_("Web Certificate used by the authentik Core webserver."), | ||||||
|         related_name="+", |  | ||||||
|     ) |  | ||||||
|     client_certificates = models.ManyToManyField( |  | ||||||
|         CertificateKeyPair, |  | ||||||
|         default=None, |  | ||||||
|         blank=True, |  | ||||||
|         help_text=_("Certificates used for client authentication."), |  | ||||||
|     ) |     ) | ||||||
|     attributes = models.JSONField(default=dict, blank=True) |     attributes = models.JSONField(default=dict, blank=True) | ||||||
|  |  | ||||||
|  | |||||||
| @ -84,7 +84,6 @@ from authentik.flows.views.executor import QS_KEY_TOKEN | |||||||
| from authentik.lib.avatars import get_avatar | from authentik.lib.avatars import get_avatar | ||||||
| from authentik.rbac.decorators import permission_required | from authentik.rbac.decorators import permission_required | ||||||
| from authentik.rbac.models import get_permission_choices | from authentik.rbac.models import get_permission_choices | ||||||
| from authentik.stages.email.flow import pickle_flow_token_for_email |  | ||||||
| from authentik.stages.email.models import EmailStage | from authentik.stages.email.models import EmailStage | ||||||
| from authentik.stages.email.tasks import send_mails | from authentik.stages.email.tasks import send_mails | ||||||
| from authentik.stages.email.utils import TemplateEmailMessage | from authentik.stages.email.utils import TemplateEmailMessage | ||||||
| @ -452,7 +451,7 @@ class UserViewSet(UsedByMixin, ModelViewSet): | |||||||
|     def list(self, request, *args, **kwargs): |     def list(self, request, *args, **kwargs): | ||||||
|         return super().list(request, *args, **kwargs) |         return super().list(request, *args, **kwargs) | ||||||
|  |  | ||||||
|     def _create_recovery_link(self, for_email=False) -> tuple[str, Token]: |     def _create_recovery_link(self) -> tuple[str, Token]: | ||||||
|         """Create a recovery link (when the current brand has a recovery flow set), |         """Create a recovery link (when the current brand has a recovery flow set), | ||||||
|         that can either be shown to an admin or sent to the user directly""" |         that can either be shown to an admin or sent to the user directly""" | ||||||
|         brand: Brand = self.request._request.brand |         brand: Brand = self.request._request.brand | ||||||
| @ -474,16 +473,12 @@ class UserViewSet(UsedByMixin, ModelViewSet): | |||||||
|             raise ValidationError( |             raise ValidationError( | ||||||
|                 {"non_field_errors": "Recovery flow not applicable to user"} |                 {"non_field_errors": "Recovery flow not applicable to user"} | ||||||
|             ) from None |             ) from None | ||||||
|         _plan = FlowToken.pickle(plan) |  | ||||||
|         if for_email: |  | ||||||
|             _plan = pickle_flow_token_for_email(plan) |  | ||||||
|         token, __ = FlowToken.objects.update_or_create( |         token, __ = FlowToken.objects.update_or_create( | ||||||
|             identifier=f"{user.uid}-password-reset", |             identifier=f"{user.uid}-password-reset", | ||||||
|             defaults={ |             defaults={ | ||||||
|                 "user": user, |                 "user": user, | ||||||
|                 "flow": flow, |                 "flow": flow, | ||||||
|                 "_plan": _plan, |                 "_plan": FlowToken.pickle(plan), | ||||||
|                 "revoke_on_execution": not for_email, |  | ||||||
|             }, |             }, | ||||||
|         ) |         ) | ||||||
|         querystring = urlencode({QS_KEY_TOKEN: token.key}) |         querystring = urlencode({QS_KEY_TOKEN: token.key}) | ||||||
| @ -653,7 +648,7 @@ class UserViewSet(UsedByMixin, ModelViewSet): | |||||||
|         if for_user.email == "": |         if for_user.email == "": | ||||||
|             LOGGER.debug("User doesn't have an email address") |             LOGGER.debug("User doesn't have an email address") | ||||||
|             raise ValidationError({"non_field_errors": "User does not have an email address set."}) |             raise ValidationError({"non_field_errors": "User does not have an email address set."}) | ||||||
|         link, token = self._create_recovery_link(for_email=True) |         link, token = self._create_recovery_link() | ||||||
|         # Lookup the email stage to assure the current user can access it |         # Lookup the email stage to assure the current user can access it | ||||||
|         stages = get_objects_for_user( |         stages = get_objects_for_user( | ||||||
|             request.user, "authentik_stages_email.view_emailstage" |             request.user, "authentik_stages_email.view_emailstage" | ||||||
|  | |||||||
| @ -79,7 +79,6 @@ def _migrate_session( | |||||||
|         AuthenticatedSession.objects.using(db_alias).create( |         AuthenticatedSession.objects.using(db_alias).create( | ||||||
|             session=session, |             session=session, | ||||||
|             user=old_auth_session.user, |             user=old_auth_session.user, | ||||||
|             uuid=old_auth_session.uuid, |  | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | |||||||
| @ -1,81 +1,10 @@ | |||||||
| # Generated by Django 5.1.9 on 2025-05-14 11:15 | # Generated by Django 5.1.9 on 2025-05-14 11:15 | ||||||
|  |  | ||||||
| from django.apps.registry import Apps, apps as global_apps | from django.apps.registry import Apps | ||||||
| from django.db import migrations | from django.db import migrations | ||||||
| from django.contrib.contenttypes.management import create_contenttypes |  | ||||||
| from django.contrib.auth.management import create_permissions |  | ||||||
| from django.db.backends.base.schema import BaseDatabaseSchemaEditor | from django.db.backends.base.schema import BaseDatabaseSchemaEditor | ||||||
|  |  | ||||||
|  |  | ||||||
| def migrate_authenticated_session_permissions(apps: Apps, schema_editor: BaseDatabaseSchemaEditor): |  | ||||||
|     """Migrate permissions from OldAuthenticatedSession to AuthenticatedSession""" |  | ||||||
|     db_alias = schema_editor.connection.alias |  | ||||||
|  |  | ||||||
|     # `apps` here is just an instance of `django.db.migrations.state.AppConfigStub`, we need the |  | ||||||
|     # real config for creating permissions and content types |  | ||||||
|     authentik_core_config = global_apps.get_app_config("authentik_core") |  | ||||||
|     # These are only ran by django after all migrations, but we need them right now. |  | ||||||
|     # `global_apps` is needed, |  | ||||||
|     create_permissions(authentik_core_config, using=db_alias, verbosity=1) |  | ||||||
|     create_contenttypes(authentik_core_config, using=db_alias, verbosity=1) |  | ||||||
|  |  | ||||||
|     # But from now on, this is just a regular migration, so use `apps` |  | ||||||
|     Permission = apps.get_model("auth", "Permission") |  | ||||||
|     ContentType = apps.get_model("contenttypes", "ContentType") |  | ||||||
|  |  | ||||||
|     try: |  | ||||||
|         old_ct = ContentType.objects.using(db_alias).get( |  | ||||||
|             app_label="authentik_core", model="oldauthenticatedsession" |  | ||||||
|         ) |  | ||||||
|         new_ct = ContentType.objects.using(db_alias).get( |  | ||||||
|             app_label="authentik_core", model="authenticatedsession" |  | ||||||
|         ) |  | ||||||
|     except ContentType.DoesNotExist: |  | ||||||
|         # This should exist at this point, but if not, let's cut our losses |  | ||||||
|         return |  | ||||||
|  |  | ||||||
|     # Get all permissions for the old content type |  | ||||||
|     old_perms = Permission.objects.using(db_alias).filter(content_type=old_ct) |  | ||||||
|  |  | ||||||
|     # Create equivalent permissions for the new content type |  | ||||||
|     for old_perm in old_perms: |  | ||||||
|         new_perm = ( |  | ||||||
|             Permission.objects.using(db_alias) |  | ||||||
|             .filter( |  | ||||||
|                 content_type=new_ct, |  | ||||||
|                 codename=old_perm.codename, |  | ||||||
|             ) |  | ||||||
|             .first() |  | ||||||
|         ) |  | ||||||
|         if not new_perm: |  | ||||||
|             # This should exist at this point, but if not, let's cut our losses |  | ||||||
|             continue |  | ||||||
|  |  | ||||||
|         # Global user permissions |  | ||||||
|         User = apps.get_model("authentik_core", "User") |  | ||||||
|         User.user_permissions.through.objects.using(db_alias).filter( |  | ||||||
|             permission=old_perm |  | ||||||
|         ).all().update(permission=new_perm) |  | ||||||
|  |  | ||||||
|         # Global role permissions |  | ||||||
|         DjangoGroup = apps.get_model("auth", "Group") |  | ||||||
|         DjangoGroup.permissions.through.objects.using(db_alias).filter( |  | ||||||
|             permission=old_perm |  | ||||||
|         ).all().update(permission=new_perm) |  | ||||||
|  |  | ||||||
|         # Object user permissions |  | ||||||
|         UserObjectPermission = apps.get_model("guardian", "UserObjectPermission") |  | ||||||
|         UserObjectPermission.objects.using(db_alias).filter(permission=old_perm).all().update( |  | ||||||
|             permission=new_perm, content_type=new_ct |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|         # Object role permissions |  | ||||||
|         GroupObjectPermission = apps.get_model("guardian", "GroupObjectPermission") |  | ||||||
|         GroupObjectPermission.objects.using(db_alias).filter(permission=old_perm).all().update( |  | ||||||
|             permission=new_perm, content_type=new_ct |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def remove_old_authenticated_session_content_type( | def remove_old_authenticated_session_content_type( | ||||||
|     apps: Apps, schema_editor: BaseDatabaseSchemaEditor |     apps: Apps, schema_editor: BaseDatabaseSchemaEditor | ||||||
| ): | ): | ||||||
| @ -92,12 +21,7 @@ class Migration(migrations.Migration): | |||||||
|     ] |     ] | ||||||
|  |  | ||||||
|     operations = [ |     operations = [ | ||||||
|         migrations.RunPython( |  | ||||||
|             code=migrate_authenticated_session_permissions, |  | ||||||
|             reverse_code=migrations.RunPython.noop, |  | ||||||
|         ), |  | ||||||
|         migrations.RunPython( |         migrations.RunPython( | ||||||
|             code=remove_old_authenticated_session_content_type, |             code=remove_old_authenticated_session_content_type, | ||||||
|             reverse_code=migrations.RunPython.noop, |  | ||||||
|         ), |         ), | ||||||
|     ] |     ] | ||||||
|  | |||||||
| @ -30,7 +30,6 @@ from structlog.stdlib import get_logger | |||||||
|  |  | ||||||
| from authentik.core.api.used_by import UsedByMixin | from authentik.core.api.used_by import UsedByMixin | ||||||
| from authentik.core.api.utils import ModelSerializer, PassiveSerializer | from authentik.core.api.utils import ModelSerializer, PassiveSerializer | ||||||
| from authentik.core.models import UserTypes |  | ||||||
| from authentik.crypto.apps import MANAGED_KEY | from authentik.crypto.apps import MANAGED_KEY | ||||||
| from authentik.crypto.builder import CertificateBuilder, PrivateKeyAlg | from authentik.crypto.builder import CertificateBuilder, PrivateKeyAlg | ||||||
| from authentik.crypto.models import CertificateKeyPair | from authentik.crypto.models import CertificateKeyPair | ||||||
| @ -273,12 +272,11 @@ class CertificateKeyPairViewSet(UsedByMixin, ModelViewSet): | |||||||
|     def view_certificate(self, request: Request, pk: str) -> Response: |     def view_certificate(self, request: Request, pk: str) -> Response: | ||||||
|         """Return certificate-key pairs certificate and log access""" |         """Return certificate-key pairs certificate and log access""" | ||||||
|         certificate: CertificateKeyPair = self.get_object() |         certificate: CertificateKeyPair = self.get_object() | ||||||
|         if request.user.type != UserTypes.INTERNAL_SERVICE_ACCOUNT: |         Event.new(  # noqa # nosec | ||||||
|             Event.new(  # noqa # nosec |             EventAction.SECRET_VIEW, | ||||||
|                 EventAction.SECRET_VIEW, |             secret=certificate, | ||||||
|                 secret=certificate, |             type="certificate", | ||||||
|                 type="certificate", |         ).from_http(request) | ||||||
|             ).from_http(request) |  | ||||||
|         if "download" in request.query_params: |         if "download" in request.query_params: | ||||||
|             # Mime type from https://pki-tutorial.readthedocs.io/en/latest/mime.html |             # Mime type from https://pki-tutorial.readthedocs.io/en/latest/mime.html | ||||||
|             response = HttpResponse( |             response = HttpResponse( | ||||||
| @ -304,12 +302,11 @@ class CertificateKeyPairViewSet(UsedByMixin, ModelViewSet): | |||||||
|     def view_private_key(self, request: Request, pk: str) -> Response: |     def view_private_key(self, request: Request, pk: str) -> Response: | ||||||
|         """Return certificate-key pairs private key and log access""" |         """Return certificate-key pairs private key and log access""" | ||||||
|         certificate: CertificateKeyPair = self.get_object() |         certificate: CertificateKeyPair = self.get_object() | ||||||
|         if request.user.type != UserTypes.INTERNAL_SERVICE_ACCOUNT: |         Event.new(  # noqa # nosec | ||||||
|             Event.new(  # noqa # nosec |             EventAction.SECRET_VIEW, | ||||||
|                 EventAction.SECRET_VIEW, |             secret=certificate, | ||||||
|                 secret=certificate, |             type="private_key", | ||||||
|                 type="private_key", |         ).from_http(request) | ||||||
|             ).from_http(request) |  | ||||||
|         if "download" in request.query_params: |         if "download" in request.query_params: | ||||||
|             # Mime type from https://pki-tutorial.readthedocs.io/en/latest/mime.html |             # Mime type from https://pki-tutorial.readthedocs.io/en/latest/mime.html | ||||||
|             response = HttpResponse(certificate.key_data, content_type="application/x-pem-file") |             response = HttpResponse(certificate.key_data, content_type="application/x-pem-file") | ||||||
|  | |||||||
| @ -25,7 +25,7 @@ class GoogleWorkspaceGroupClient( | |||||||
|     """Google client for groups""" |     """Google client for groups""" | ||||||
|  |  | ||||||
|     connection_type = GoogleWorkspaceProviderGroup |     connection_type = GoogleWorkspaceProviderGroup | ||||||
|     connection_attr = "googleworkspaceprovidergroup_set" |     connection_type_query = "group" | ||||||
|     can_discover = True |     can_discover = True | ||||||
|  |  | ||||||
|     def __init__(self, provider: GoogleWorkspaceProvider) -> None: |     def __init__(self, provider: GoogleWorkspaceProvider) -> None: | ||||||
|  | |||||||
| @ -20,7 +20,7 @@ class GoogleWorkspaceUserClient(GoogleWorkspaceSyncClient[User, GoogleWorkspaceP | |||||||
|     """Sync authentik users into google workspace""" |     """Sync authentik users into google workspace""" | ||||||
|  |  | ||||||
|     connection_type = GoogleWorkspaceProviderUser |     connection_type = GoogleWorkspaceProviderUser | ||||||
|     connection_attr = "googleworkspaceprovideruser_set" |     connection_type_query = "user" | ||||||
|     can_discover = True |     can_discover = True | ||||||
|  |  | ||||||
|     def __init__(self, provider: GoogleWorkspaceProvider) -> None: |     def __init__(self, provider: GoogleWorkspaceProvider) -> None: | ||||||
|  | |||||||
| @ -132,11 +132,7 @@ class GoogleWorkspaceProvider(OutgoingSyncProvider, BackchannelProvider): | |||||||
|         if type == User: |         if type == User: | ||||||
|             # Get queryset of all users with consistent ordering |             # Get queryset of all users with consistent ordering | ||||||
|             # according to the provider's settings |             # according to the provider's settings | ||||||
|             base = ( |             base = User.objects.all().exclude_anonymous() | ||||||
|                 User.objects.prefetch_related("googleworkspaceprovideruser_set") |  | ||||||
|                 .all() |  | ||||||
|                 .exclude_anonymous() |  | ||||||
|             ) |  | ||||||
|             if self.exclude_users_service_account: |             if self.exclude_users_service_account: | ||||||
|                 base = base.exclude(type=UserTypes.SERVICE_ACCOUNT).exclude( |                 base = base.exclude(type=UserTypes.SERVICE_ACCOUNT).exclude( | ||||||
|                     type=UserTypes.INTERNAL_SERVICE_ACCOUNT |                     type=UserTypes.INTERNAL_SERVICE_ACCOUNT | ||||||
| @ -146,11 +142,7 @@ class GoogleWorkspaceProvider(OutgoingSyncProvider, BackchannelProvider): | |||||||
|             return base.order_by("pk") |             return base.order_by("pk") | ||||||
|         if type == Group: |         if type == Group: | ||||||
|             # Get queryset of all groups with consistent ordering |             # Get queryset of all groups with consistent ordering | ||||||
|             return ( |             return Group.objects.all().order_by("pk") | ||||||
|                 Group.objects.prefetch_related("googleworkspaceprovidergroup_set") |  | ||||||
|                 .all() |  | ||||||
|                 .order_by("pk") |  | ||||||
|             ) |  | ||||||
|         raise ValueError(f"Invalid type {type}") |         raise ValueError(f"Invalid type {type}") | ||||||
|  |  | ||||||
|     def google_credentials(self): |     def google_credentials(self): | ||||||
|  | |||||||
| @ -29,7 +29,7 @@ class MicrosoftEntraGroupClient( | |||||||
|     """Microsoft client for groups""" |     """Microsoft client for groups""" | ||||||
|  |  | ||||||
|     connection_type = MicrosoftEntraProviderGroup |     connection_type = MicrosoftEntraProviderGroup | ||||||
|     connection_attr = "microsoftentraprovidergroup_set" |     connection_type_query = "group" | ||||||
|     can_discover = True |     can_discover = True | ||||||
|  |  | ||||||
|     def __init__(self, provider: MicrosoftEntraProvider) -> None: |     def __init__(self, provider: MicrosoftEntraProvider) -> None: | ||||||
|  | |||||||
| @ -24,7 +24,7 @@ class MicrosoftEntraUserClient(MicrosoftEntraSyncClient[User, MicrosoftEntraProv | |||||||
|     """Sync authentik users into microsoft entra""" |     """Sync authentik users into microsoft entra""" | ||||||
|  |  | ||||||
|     connection_type = MicrosoftEntraProviderUser |     connection_type = MicrosoftEntraProviderUser | ||||||
|     connection_attr = "microsoftentraprovideruser_set" |     connection_type_query = "user" | ||||||
|     can_discover = True |     can_discover = True | ||||||
|  |  | ||||||
|     def __init__(self, provider: MicrosoftEntraProvider) -> None: |     def __init__(self, provider: MicrosoftEntraProvider) -> None: | ||||||
|  | |||||||
| @ -121,11 +121,7 @@ class MicrosoftEntraProvider(OutgoingSyncProvider, BackchannelProvider): | |||||||
|         if type == User: |         if type == User: | ||||||
|             # Get queryset of all users with consistent ordering |             # Get queryset of all users with consistent ordering | ||||||
|             # according to the provider's settings |             # according to the provider's settings | ||||||
|             base = ( |             base = User.objects.all().exclude_anonymous() | ||||||
|                 User.objects.prefetch_related("microsoftentraprovideruser_set") |  | ||||||
|                 .all() |  | ||||||
|                 .exclude_anonymous() |  | ||||||
|             ) |  | ||||||
|             if self.exclude_users_service_account: |             if self.exclude_users_service_account: | ||||||
|                 base = base.exclude(type=UserTypes.SERVICE_ACCOUNT).exclude( |                 base = base.exclude(type=UserTypes.SERVICE_ACCOUNT).exclude( | ||||||
|                     type=UserTypes.INTERNAL_SERVICE_ACCOUNT |                     type=UserTypes.INTERNAL_SERVICE_ACCOUNT | ||||||
| @ -135,11 +131,7 @@ class MicrosoftEntraProvider(OutgoingSyncProvider, BackchannelProvider): | |||||||
|             return base.order_by("pk") |             return base.order_by("pk") | ||||||
|         if type == Group: |         if type == Group: | ||||||
|             # Get queryset of all groups with consistent ordering |             # Get queryset of all groups with consistent ordering | ||||||
|             return ( |             return Group.objects.all().order_by("pk") | ||||||
|                 Group.objects.prefetch_related("microsoftentraprovidergroup_set") |  | ||||||
|                 .all() |  | ||||||
|                 .order_by("pk") |  | ||||||
|             ) |  | ||||||
|         raise ValueError(f"Invalid type {type}") |         raise ValueError(f"Invalid type {type}") | ||||||
|  |  | ||||||
|     def microsoft_credentials(self): |     def microsoft_credentials(self): | ||||||
|  | |||||||
| @ -19,7 +19,6 @@ TENANT_APPS = [ | |||||||
|     "authentik.enterprise.providers.microsoft_entra", |     "authentik.enterprise.providers.microsoft_entra", | ||||||
|     "authentik.enterprise.providers.ssf", |     "authentik.enterprise.providers.ssf", | ||||||
|     "authentik.enterprise.stages.authenticator_endpoint_gdtc", |     "authentik.enterprise.stages.authenticator_endpoint_gdtc", | ||||||
|     "authentik.enterprise.stages.mtls", |  | ||||||
|     "authentik.enterprise.stages.source", |     "authentik.enterprise.stages.source", | ||||||
| ] | ] | ||||||
|  |  | ||||||
|  | |||||||
| @ -1,31 +0,0 @@ | |||||||
| """Mutual TLS Stage API Views""" |  | ||||||
|  |  | ||||||
| from rest_framework.viewsets import ModelViewSet |  | ||||||
|  |  | ||||||
| from authentik.core.api.used_by import UsedByMixin |  | ||||||
| from authentik.enterprise.api import EnterpriseRequiredMixin |  | ||||||
| from authentik.enterprise.stages.mtls.models import MutualTLSStage |  | ||||||
| from authentik.flows.api.stages import StageSerializer |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class MutualTLSStageSerializer(EnterpriseRequiredMixin, StageSerializer): |  | ||||||
|     """MutualTLSStage Serializer""" |  | ||||||
|  |  | ||||||
|     class Meta: |  | ||||||
|         model = MutualTLSStage |  | ||||||
|         fields = StageSerializer.Meta.fields + [ |  | ||||||
|             "mode", |  | ||||||
|             "certificate_authorities", |  | ||||||
|             "cert_attribute", |  | ||||||
|             "user_attribute", |  | ||||||
|         ] |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class MutualTLSStageViewSet(UsedByMixin, ModelViewSet): |  | ||||||
|     """MutualTLSStage Viewset""" |  | ||||||
|  |  | ||||||
|     queryset = MutualTLSStage.objects.all() |  | ||||||
|     serializer_class = MutualTLSStageSerializer |  | ||||||
|     filterset_fields = "__all__" |  | ||||||
|     ordering = ["name"] |  | ||||||
|     search_fields = ["name"] |  | ||||||
| @ -1,12 +0,0 @@ | |||||||
| """authentik stage app config""" |  | ||||||
|  |  | ||||||
| from authentik.enterprise.apps import EnterpriseConfig |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class AuthentikEnterpriseStageMTLSConfig(EnterpriseConfig): |  | ||||||
|     """authentik MTLS stage config""" |  | ||||||
|  |  | ||||||
|     name = "authentik.enterprise.stages.mtls" |  | ||||||
|     label = "authentik_stages_mtls" |  | ||||||
|     verbose_name = "authentik Enterprise.Stages.MTLS" |  | ||||||
|     default = True |  | ||||||
| @ -1,68 +0,0 @@ | |||||||
| # Generated by Django 5.1.9 on 2025-05-19 18:29 |  | ||||||
|  |  | ||||||
| import django.db.models.deletion |  | ||||||
| from django.db import migrations, models |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): |  | ||||||
|  |  | ||||||
|     initial = True |  | ||||||
|  |  | ||||||
|     dependencies = [ |  | ||||||
|         ("authentik_crypto", "0004_alter_certificatekeypair_name"), |  | ||||||
|         ("authentik_flows", "0027_auto_20231028_1424"), |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     operations = [ |  | ||||||
|         migrations.CreateModel( |  | ||||||
|             name="MutualTLSStage", |  | ||||||
|             fields=[ |  | ||||||
|                 ( |  | ||||||
|                     "stage_ptr", |  | ||||||
|                     models.OneToOneField( |  | ||||||
|                         auto_created=True, |  | ||||||
|                         on_delete=django.db.models.deletion.CASCADE, |  | ||||||
|                         parent_link=True, |  | ||||||
|                         primary_key=True, |  | ||||||
|                         serialize=False, |  | ||||||
|                         to="authentik_flows.stage", |  | ||||||
|                     ), |  | ||||||
|                 ), |  | ||||||
|                 ( |  | ||||||
|                     "mode", |  | ||||||
|                     models.TextField(choices=[("optional", "Optional"), ("required", "Required")]), |  | ||||||
|                 ), |  | ||||||
|                 ( |  | ||||||
|                     "cert_attribute", |  | ||||||
|                     models.TextField( |  | ||||||
|                         choices=[ |  | ||||||
|                             ("subject", "Subject"), |  | ||||||
|                             ("common_name", "Common Name"), |  | ||||||
|                             ("email", "Email"), |  | ||||||
|                         ] |  | ||||||
|                     ), |  | ||||||
|                 ), |  | ||||||
|                 ( |  | ||||||
|                     "user_attribute", |  | ||||||
|                     models.TextField(choices=[("username", "Username"), ("email", "Email")]), |  | ||||||
|                 ), |  | ||||||
|                 ( |  | ||||||
|                     "certificate_authorities", |  | ||||||
|                     models.ManyToManyField( |  | ||||||
|                         blank=True, |  | ||||||
|                         default=None, |  | ||||||
|                         help_text="Configure certificate authorities to validate the certificate against. This option has a higher priority than the `client_certificate` option on `Brand`.", |  | ||||||
|                         to="authentik_crypto.certificatekeypair", |  | ||||||
|                     ), |  | ||||||
|                 ), |  | ||||||
|             ], |  | ||||||
|             options={ |  | ||||||
|                 "verbose_name": "Mutual TLS Stage", |  | ||||||
|                 "verbose_name_plural": "Mutual TLS Stages", |  | ||||||
|                 "permissions": [ |  | ||||||
|                     ("pass_outpost_certificate", "Permissions to pass Certificates for outposts.") |  | ||||||
|                 ], |  | ||||||
|             }, |  | ||||||
|             bases=("authentik_flows.stage",), |  | ||||||
|         ), |  | ||||||
|     ] |  | ||||||
| @ -1,71 +0,0 @@ | |||||||
| from django.db import models |  | ||||||
| from django.utils.translation import gettext_lazy as _ |  | ||||||
| from rest_framework.serializers import Serializer |  | ||||||
|  |  | ||||||
| from authentik.crypto.models import CertificateKeyPair |  | ||||||
| from authentik.flows.models import Stage |  | ||||||
| from authentik.flows.stage import StageView |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class TLSMode(models.TextChoices): |  | ||||||
|     """Modes the TLS Stage can operate in""" |  | ||||||
|  |  | ||||||
|     OPTIONAL = "optional" |  | ||||||
|     REQUIRED = "required" |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class CertAttributes(models.TextChoices): |  | ||||||
|     """Certificate attribute used for user matching""" |  | ||||||
|  |  | ||||||
|     SUBJECT = "subject" |  | ||||||
|     COMMON_NAME = "common_name" |  | ||||||
|     EMAIL = "email" |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class UserAttributes(models.TextChoices): |  | ||||||
|     """User attribute for user matching""" |  | ||||||
|  |  | ||||||
|     USERNAME = "username" |  | ||||||
|     EMAIL = "email" |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class MutualTLSStage(Stage): |  | ||||||
|     """Authenticate/enroll users using a client-certificate.""" |  | ||||||
|  |  | ||||||
|     mode = models.TextField(choices=TLSMode.choices) |  | ||||||
|  |  | ||||||
|     certificate_authorities = models.ManyToManyField( |  | ||||||
|         CertificateKeyPair, |  | ||||||
|         default=None, |  | ||||||
|         blank=True, |  | ||||||
|         help_text=_( |  | ||||||
|             "Configure certificate authorities to validate the certificate against. " |  | ||||||
|             "This option has a higher priority than the `client_certificate` option on `Brand`." |  | ||||||
|         ), |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|     cert_attribute = models.TextField(choices=CertAttributes.choices) |  | ||||||
|     user_attribute = models.TextField(choices=UserAttributes.choices) |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def view(self) -> type[StageView]: |  | ||||||
|         from authentik.enterprise.stages.mtls.stage import MTLSStageView |  | ||||||
|  |  | ||||||
|         return MTLSStageView |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def serializer(self) -> type[Serializer]: |  | ||||||
|         from authentik.enterprise.stages.mtls.api import MutualTLSStageSerializer |  | ||||||
|  |  | ||||||
|         return MutualTLSStageSerializer |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def component(self) -> str: |  | ||||||
|         return "ak-stage-mtls-form" |  | ||||||
|  |  | ||||||
|     class Meta: |  | ||||||
|         verbose_name = _("Mutual TLS Stage") |  | ||||||
|         verbose_name_plural = _("Mutual TLS Stages") |  | ||||||
|         permissions = [ |  | ||||||
|             ("pass_outpost_certificate", _("Permissions to pass Certificates for outposts.")), |  | ||||||
|         ] |  | ||||||
| @ -1,230 +0,0 @@ | |||||||
| from binascii import hexlify |  | ||||||
| from urllib.parse import unquote_plus |  | ||||||
|  |  | ||||||
| from cryptography.exceptions import InvalidSignature |  | ||||||
| from cryptography.hazmat.primitives import hashes |  | ||||||
| from cryptography.x509 import ( |  | ||||||
|     Certificate, |  | ||||||
|     NameOID, |  | ||||||
|     ObjectIdentifier, |  | ||||||
|     UnsupportedGeneralNameType, |  | ||||||
|     load_pem_x509_certificate, |  | ||||||
| ) |  | ||||||
| from cryptography.x509.verification import PolicyBuilder, Store, VerificationError |  | ||||||
| from django.utils.translation import gettext_lazy as _ |  | ||||||
|  |  | ||||||
| from authentik.brands.models import Brand |  | ||||||
| from authentik.core.models import User |  | ||||||
| from authentik.crypto.models import CertificateKeyPair |  | ||||||
| from authentik.enterprise.stages.mtls.models import ( |  | ||||||
|     CertAttributes, |  | ||||||
|     MutualTLSStage, |  | ||||||
|     TLSMode, |  | ||||||
|     UserAttributes, |  | ||||||
| ) |  | ||||||
| from authentik.flows.challenge import AccessDeniedChallenge |  | ||||||
| from authentik.flows.models import FlowDesignation |  | ||||||
| from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER |  | ||||||
| from authentik.flows.stage import ChallengeStageView |  | ||||||
| from authentik.root.middleware import ClientIPMiddleware |  | ||||||
| from authentik.stages.password.stage import PLAN_CONTEXT_METHOD, PLAN_CONTEXT_METHOD_ARGS |  | ||||||
| from authentik.stages.prompt.stage import PLAN_CONTEXT_PROMPT |  | ||||||
|  |  | ||||||
| # All of these headers must only be accepted from "trusted" reverse proxies |  | ||||||
| # See internal/web/proxy.go:39 |  | ||||||
| HEADER_PROXY_FORWARDED = "X-Forwarded-Client-Cert" |  | ||||||
| HEADER_NGINX_FORWARDED = "SSL-Client-Cert" |  | ||||||
| HEADER_TRAEFIK_FORWARDED = "X-Forwarded-TLS-Client-Cert" |  | ||||||
| HEADER_OUTPOST_FORWARDED = "X-Authentik-Outpost-Certificate" |  | ||||||
|  |  | ||||||
|  |  | ||||||
| PLAN_CONTEXT_CERTIFICATE = "certificate" |  | ||||||
|  |  | ||||||
|  |  | ||||||
class MTLSStageView(ChallengeStageView):

    def __parse_single_cert(self, raw: str | None) -> list[Certificate]:
        """Decode one URL-encoded PEM certificate into a one-element list.

        Returns an empty list when the header is absent/empty or the payload
        does not parse, so callers can unconditionally extend their results.
        """
        if not raw:
            return []
        try:
            return [load_pem_x509_certificate(unquote_plus(raw).encode())]
        except ValueError as exc:
            self.logger.info("Failed to parse certificate", exc=exc)
            return []
|  |  | ||||||
|     def _parse_cert_xfcc(self) -> list[Certificate]: |  | ||||||
|         """Parse certificates in the format given to us in |  | ||||||
|         the format of the authentik router/envoy""" |  | ||||||
|         xfcc_raw = self.request.headers.get(HEADER_PROXY_FORWARDED) |  | ||||||
|         if not xfcc_raw: |  | ||||||
|             return [] |  | ||||||
|         certs = [] |  | ||||||
|         for r_cert in xfcc_raw.split(","): |  | ||||||
|             el = r_cert.split(";") |  | ||||||
|             raw_cert = {k.split("=")[0]: k.split("=")[1] for k in el} |  | ||||||
|             if "Cert" not in raw_cert: |  | ||||||
|                 continue |  | ||||||
|             certs.extend(self.__parse_single_cert(raw_cert["Cert"])) |  | ||||||
|         return certs |  | ||||||
|  |  | ||||||
|     def _parse_cert_nginx(self) -> list[Certificate]: |  | ||||||
|         """Parse certificates in the format nginx-ingress gives to us""" |  | ||||||
|         sslcc_raw = self.request.headers.get(HEADER_NGINX_FORWARDED) |  | ||||||
|         return self.__parse_single_cert(sslcc_raw) |  | ||||||
|  |  | ||||||
|     def _parse_cert_traefik(self) -> list[Certificate]: |  | ||||||
|         """Parse certificates in the format traefik gives to us""" |  | ||||||
|         ftcc_raw = self.request.headers.get(HEADER_TRAEFIK_FORWARDED) |  | ||||||
|         return self.__parse_single_cert(ftcc_raw) |  | ||||||
|  |  | ||||||
|     def _parse_cert_outpost(self) -> list[Certificate]: |  | ||||||
|         """Parse certificates in the format outposts give to us. Also authenticates |  | ||||||
|         the outpost to ensure it has the permission to do so""" |  | ||||||
|         user = ClientIPMiddleware.get_outpost_user(self.request) |  | ||||||
|         if not user: |  | ||||||
|             return [] |  | ||||||
|         if not user.has_perm( |  | ||||||
|             "pass_outpost_certificate", self.executor.current_stage |  | ||||||
|         ) and not user.has_perm("authentik_stages_mtls.pass_outpost_certificate"): |  | ||||||
|             return [] |  | ||||||
|         outpost_raw = self.request.headers.get(HEADER_OUTPOST_FORWARDED) |  | ||||||
|         return self.__parse_single_cert(outpost_raw) |  | ||||||
|  |  | ||||||
    def get_authorities(self) -> list[CertificateKeyPair] | None:
        """Return the certificate authorities to validate against.

        Stage-level CAs take precedence over the brand's client certificates;
        returns None when neither is configured. NOTE(review): the actual
        return value is a QuerySet, not a list — callers only iterate it.
        """
        # We can't access `certificate_authorities` on `self.executor.current_stage`, as that would
        # load the certificate into the directly referenced foreign key, which we have to pickle
        # as part of the flow plan, and cryptography certs can't be pickled
        stage: MutualTLSStage = (
            MutualTLSStage.objects.filter(pk=self.executor.current_stage.pk)
            .prefetch_related("certificate_authorities")
            .first()
        )
        if stage.certificate_authorities.exists():
            return stage.certificate_authorities.order_by("name")
        brand: Brand = self.request.brand
        if brand.client_certificates.exists():
            return brand.client_certificates.order_by("name")
        return None
|  |  | ||||||
|     def validate_cert(self, authorities: list[CertificateKeyPair], certs: list[Certificate]): |  | ||||||
|         authorities_cert = [x.certificate for x in authorities] |  | ||||||
|         for _cert in certs: |  | ||||||
|             try: |  | ||||||
|                 PolicyBuilder().store(Store(authorities_cert)).build_client_verifier().verify( |  | ||||||
|                     _cert, [] |  | ||||||
|                 ) |  | ||||||
|                 return _cert |  | ||||||
|             except ( |  | ||||||
|                 InvalidSignature, |  | ||||||
|                 TypeError, |  | ||||||
|                 ValueError, |  | ||||||
|                 VerificationError, |  | ||||||
|                 UnsupportedGeneralNameType, |  | ||||||
|             ) as exc: |  | ||||||
|                 self.logger.warning("Discarding invalid certificate", cert=_cert, exc=exc) |  | ||||||
|                 continue |  | ||||||
|         return None |  | ||||||
|  |  | ||||||
|     def check_if_user(self, cert: Certificate): |  | ||||||
|         stage: MutualTLSStage = self.executor.current_stage |  | ||||||
|         cert_attr = None |  | ||||||
|         user_attr = None |  | ||||||
|         match stage.cert_attribute: |  | ||||||
|             case CertAttributes.SUBJECT: |  | ||||||
|                 cert_attr = cert.subject.rfc4514_string() |  | ||||||
|             case CertAttributes.COMMON_NAME: |  | ||||||
|                 cert_attr = self.get_cert_attribute(cert, NameOID.COMMON_NAME) |  | ||||||
|             case CertAttributes.EMAIL: |  | ||||||
|                 cert_attr = self.get_cert_attribute(cert, NameOID.EMAIL_ADDRESS) |  | ||||||
|         match stage.user_attribute: |  | ||||||
|             case UserAttributes.USERNAME: |  | ||||||
|                 user_attr = "username" |  | ||||||
|             case UserAttributes.EMAIL: |  | ||||||
|                 user_attr = "email" |  | ||||||
|         if not user_attr or not cert_attr: |  | ||||||
|             return None |  | ||||||
|         return User.objects.filter(**{user_attr: cert_attr}).first() |  | ||||||
|  |  | ||||||
|     def _cert_to_dict(self, cert: Certificate) -> dict: |  | ||||||
|         """Represent a certificate in a dictionary, as certificate objects cannot be pickled""" |  | ||||||
|         return { |  | ||||||
|             "serial_number": str(cert.serial_number), |  | ||||||
|             "subject": cert.subject.rfc4514_string(), |  | ||||||
|             "issuer": cert.issuer.rfc4514_string(), |  | ||||||
|             "fingerprint_sha256": hexlify(cert.fingerprint(hashes.SHA256()), ":").decode("utf-8"), |  | ||||||
|             "fingerprint_sha1": hexlify(cert.fingerprint(hashes.SHA1()), ":").decode(  # nosec |  | ||||||
|                 "utf-8" |  | ||||||
|             ), |  | ||||||
|         } |  | ||||||
|  |  | ||||||
|     def auth_user(self, user: User, cert: Certificate): |  | ||||||
|         self.executor.plan.context[PLAN_CONTEXT_PENDING_USER] = user |  | ||||||
|         self.executor.plan.context.setdefault(PLAN_CONTEXT_METHOD, "mtls") |  | ||||||
|         self.executor.plan.context.setdefault(PLAN_CONTEXT_METHOD_ARGS, {}) |  | ||||||
|         self.executor.plan.context[PLAN_CONTEXT_METHOD_ARGS].update( |  | ||||||
|             {"certificate": self._cert_to_dict(cert)} |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     def enroll_prepare_user(self, cert: Certificate): |  | ||||||
|         self.executor.plan.context.setdefault(PLAN_CONTEXT_PROMPT, {}) |  | ||||||
|         self.executor.plan.context[PLAN_CONTEXT_PROMPT].update( |  | ||||||
|             { |  | ||||||
|                 "email": self.get_cert_attribute(cert, NameOID.EMAIL_ADDRESS), |  | ||||||
|                 "name": self.get_cert_attribute(cert, NameOID.COMMON_NAME), |  | ||||||
|             } |  | ||||||
|         ) |  | ||||||
|         self.executor.plan.context[PLAN_CONTEXT_CERTIFICATE] = self._cert_to_dict(cert) |  | ||||||
|  |  | ||||||
|     def get_cert_attribute(self, cert: Certificate, oid: ObjectIdentifier) -> str | None: |  | ||||||
|         attr = cert.subject.get_attributes_for_oid(oid) |  | ||||||
|         if len(attr) < 1: |  | ||||||
|             return None |  | ||||||
|         return str(attr[0].value) |  | ||||||
|  |  | ||||||
    def dispatch(self, request, *args, **kwargs):
        """Validate forwarded client certificates before the normal challenge
        machinery runs: on success the stage completes without rendering a
        challenge; on failure super().dispatch() renders the access-denied
        challenge built by get_challenge()."""
        stage: MutualTLSStage = self.executor.current_stage
        # Collect candidate certificates from every supported reverse-proxy
        # header (authentik proxy/envoy, nginx, traefik, outposts); these
        # headers must only be accepted from trusted proxies (see top of file).
        certs = [
            *self._parse_cert_xfcc(),
            *self._parse_cert_nginx(),
            *self._parse_cert_traefik(),
            *self._parse_cert_outpost(),
        ]
        authorities = self.get_authorities()
        if not authorities:
            self.logger.warning("No Certificate authority found")
            # With no CA configured: optional mode skips the stage entirely,
            # required mode falls through to the access-denied challenge.
            if stage.mode == TLSMode.OPTIONAL:
                return self.executor.stage_ok()
            if stage.mode == TLSMode.REQUIRED:
                return super().dispatch(request, *args, **kwargs)
        cert = self.validate_cert(authorities, certs)
        if not cert and stage.mode == TLSMode.REQUIRED:
            self.logger.warning("Client certificate required but no certificates given")
            return super().dispatch(
                request,
                *args,
                error_message=_("Certificate required but no certificate was given."),
                **kwargs,
            )
        if not cert and stage.mode == TLSMode.OPTIONAL:
            self.logger.info("No certificate given, continuing")
            return self.executor.stage_ok()
        existing_user = self.check_if_user(cert)
        if self.executor.flow.designation == FlowDesignation.ENROLLMENT:
            # Enrollment flows pre-fill prompt data instead of authenticating.
            self.enroll_prepare_user(cert)
        elif existing_user:
            self.auth_user(existing_user, cert)
        else:
            return super().dispatch(
                request, *args, error_message=_("No user found for certificate."), **kwargs
            )
        return self.executor.stage_ok()
|  |  | ||||||
|     def get_challenge(self, *args, error_message: str | None = None, **kwargs): |  | ||||||
|         return AccessDeniedChallenge( |  | ||||||
|             data={ |  | ||||||
|                 "component": "ak-stage-access-denied", |  | ||||||
|                 "error_message": str(error_message or "Unknown error"), |  | ||||||
|             } |  | ||||||
|         ) |  | ||||||
| @ -1,31 +0,0 @@ | |||||||
| -----BEGIN CERTIFICATE----- |  | ||||||
| MIIFXDCCA0SgAwIBAgIUBmV7zREyC1SPr72/75/L9zpwV18wDQYJKoZIhvcNAQEL |  | ||||||
| BQAwRjEaMBgGA1UEAwwRYXV0aGVudGlrIFRlc3QgQ0ExEjAQBgNVBAoMCWF1dGhl |  | ||||||
| bnRpazEUMBIGA1UECwwLU2VsZi1zaWduZWQwHhcNMjUwNDI3MTgzMDUwWhcNMzUw |  | ||||||
| MzA3MTgzMDUwWjBGMRowGAYDVQQDDBFhdXRoZW50aWsgVGVzdCBDQTESMBAGA1UE |  | ||||||
| CgwJYXV0aGVudGlrMRQwEgYDVQQLDAtTZWxmLXNpZ25lZDCCAiIwDQYJKoZIhvcN |  | ||||||
| AQEBBQADggIPADCCAgoCggIBAMc0NxZj7j1mPu0aRToo8oMPdC3T99xgxnqdr18x |  | ||||||
| LV4pWyi/YLghgZHqNQY2xNP6JIlSeUZD6KFUYT2sPL4Av/zSg5zO8bl+/lf7ckje |  | ||||||
| O1/Bt5A8xtL0CpmpMDGiI6ibdDElaywM6AohisbxrV29pygSKGq2wugF/urqGtE+ |  | ||||||
| 5z4y5Kt6qMdKkd0iXT+WagbQTIUlykFKgB0+qqTLzDl01lVDa/DoLl8Hqp45mVx2 |  | ||||||
| pqrGsSa3TCErLIv9hUlZklF7A8UV4ZB4JL20UKcP8dKzQClviNie17tpsUpOuy3A |  | ||||||
| SQ6+guWTHTLJNCSdLn1xIqc5q+f5wd2dIDf8zXCTHj+Xp0bJE3Vgaq5R31K9+b+1 |  | ||||||
| 2dDWz1KcNJaLEnw2+b0O8M64wTMLxhqOv7QfLUr6Pmg1ZymghjLcZ6bnU9e31Vza |  | ||||||
| hlPKhxjqYQUC4Kq+oaYF6qdUeJy+dsYf0iDv5tTC+eReZDWIjxTPrNpwA773ZwT7 |  | ||||||
| WVmL7ULGpuP2g9rNvFBcZiN+i6d7CUoN+jd/iRdo79lrI0dfXiyy4bYgW/2HeZfF |  | ||||||
| HaOsc1xsoqnJdWbWkX/ooyaCjAfm07kS3HiOzz4q3QW4wgGrwV8lEraLPxYYeOQu |  | ||||||
| YcGMOM8NfnVkjc8gmyXUxedCje5Vz/Tu5fKrQEInnCmXxVsWbwr/LzEjMKAM/ivY |  | ||||||
| 0TXxAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0G |  | ||||||
| A1UdDgQWBBTa+Ns6QzqlNvnTGszkouQQtZnVJDANBgkqhkiG9w0BAQsFAAOCAgEA |  | ||||||
| NpJEDMXjuEIzSzafkxSshvjnt5sMYmzmvjNoRlkxgN2YcWvPoxbalGAYzcpyggT2 |  | ||||||
| 6xZY8R4tvB1oNTCArqwf860kkofUoJCr88D/pU3Cv4JhjCWs4pmXTsvSqlBSlJbo |  | ||||||
| +jPBZwbn6it/6jcit6Be3rW2PtHe8tASd9Lf8/2r1ZvupXwPzcR84R4Z10ve2lqV |  | ||||||
| xxcWlMmBh51CaYI0b1/WTe9Ua+wgkCVkxbf9zNcDQXjxw2ICWK+nR/4ld4nmqVm2 |  | ||||||
| C7nhvXwU8FAHl7ZgR2Z3PLrwPuhd+kd6NXQqNkS9A+n+1vSRLbRjmV8pwIPpdPEq |  | ||||||
| nslUAGJJBHDUBArxC3gOJSB+WtmaCfzDu2gepMf9Ng1H2ZhwSF/FH3v3fsJqZkzz |  | ||||||
| NBstT9KuNGQRYiCmAPJaoVAc9BoLa+BFML1govtWtpdmbFk8PZEcuUsP7iAZqFF1 |  | ||||||
| uuldPyZ8huGpQSR6Oq2bILRHowfGY0npTZAyxg0Vs8UMy1HTwNOp9OuRtArMZmsJ |  | ||||||
| jFIx1QzRf9S1i6bYpOzOudoXj4ARkS1KmVExGjJFcIT0xlFSSERie2fEKSeEYOyG |  | ||||||
| G+PA2qRt/F51FGOMm1ZscjPXqk2kt3C4BFbz6Vvxsq7D3lmhvFLn4jVA8+OidsM0 |  | ||||||
| YUrVMtWET/RkjEIbADbgRXxNUNo+jtQZDU9C1IiAdfk= |  | ||||||
| -----END CERTIFICATE----- |  | ||||||
| @ -1,31 +0,0 @@ | |||||||
| -----BEGIN CERTIFICATE----- |  | ||||||
| MIIFWTCCA0GgAwIBAgIUDEnKCSmIXG/akySGes7bhOGrN/8wDQYJKoZIhvcNAQEL |  | ||||||
| BQAwRjEaMBgGA1UEAwwRYXV0aGVudGlrIFRlc3QgQ0ExEjAQBgNVBAoMCWF1dGhl |  | ||||||
| bnRpazEUMBIGA1UECwwLU2VsZi1zaWduZWQwHhcNMjUwNTE5MTIzODQ2WhcNMjYw |  | ||||||
| NTE1MTIzODQ2WjARMQ8wDQYDVQQDDAZjbGllbnQwggIiMA0GCSqGSIb3DQEBAQUA |  | ||||||
| A4ICDwAwggIKAoICAQCkPkS1V6l0gj0ulxMznkxkgrw4p9Tjd8teSsGZt02A2Eo6 |  | ||||||
| 7D8FbJ7pp3d5fYW/TWuEKVBLWTID6rijW5EGcdgTM5Jxf/QR+aZTEK6umQxUd4yO |  | ||||||
| mOtp+xVS3KlcsSej2dFpeE5h5VkZizHpvh5xkoAP8W5VtQLOVF0hIeumHnJmaeLj |  | ||||||
| +mhK9PBFpO7k9SFrYYhd/uLrYbIdANihbIO2Q74rNEJHewhFNM7oNSjjEWzRd/7S |  | ||||||
| qNdQij9JGrVG7u8YJJscEQHqyHMYFVCEMjxmsge5BO6Vx5OWmUE3wXPzb5TbyTS4 |  | ||||||
| +yg88g9rYTUXrzz+poCyKpaur45qBsdw35lJ8nq69VJj2xJLGQDwoTgGSXRuPciC |  | ||||||
| 3OilQI+Ma+j8qQGJxJ8WJxISlf1cuhp+V4ZUd1lawlM5hAXyXmHRlH4pun4y+g7O |  | ||||||
| O34+fE3pK25JjVCicMT/rC2A/sb95j/fHTzzJpbB70U0I50maTcIsOkyw6aiF//E |  | ||||||
| 0ShTDz14x22SCMolUc6hxTDZvBB6yrcJHd7d9CCnFH2Sgo13QrtNJ/atXgm13HGh |  | ||||||
| wBzRwK38XUGl/J4pJaxAupTVCPriStUM3m0EYHNelRRUE91pbyeGT0rvOuv00uLw |  | ||||||
| Rj7K7hJZR8avTKWmKrVBVpq+gSojGW1DwBS0NiDNkZs0d/IjB1wkzczEgdZjXwID |  | ||||||
| AQABo3QwcjAfBgNVHSMEGDAWgBTa+Ns6QzqlNvnTGszkouQQtZnVJDAdBgNVHSUE |  | ||||||
| FjAUBggrBgEFBQcDAgYIKwYBBQUHAwEwEQYDVR0RBAowCIIGY2xpZW50MB0GA1Ud |  | ||||||
| DgQWBBT1xg5sXkypRBwvCxBuyfoanaiZ5jANBgkqhkiG9w0BAQsFAAOCAgEAvUAz |  | ||||||
| YwIjxY/0KHZDU8owdILVqKChzfLcy9OHNPyEI3TSOI8X6gNtBO+HE6r8aWGcC9vw |  | ||||||
| zzeIsNQ3UEjvRWi2r+vUVbiPTbFdZboNDSZv6ZmGHxwd85VsjXRGoXV6koCT/9zi |  | ||||||
| 9/lCM1DwqwYSwBphMJdRVFRUMluSYk1oHflGeA18xgGuts4eFivJwhabGm1AdVVQ |  | ||||||
| /CYvqCuTxd/DCzWZBdyxYpDru64i/kyeJCt1pThKEFDWmpumFdBI4CxJ0OhxVSGp |  | ||||||
| dOXzK+Y6ULepxCvi6/OpSog52jQ6PnNd1ghiYtq7yO1T4GQz65M1vtHHVvQ3gfBE |  | ||||||
| AuKYQp6io7ypitRx+LpjsBQenyP4FFGfrq7pm90nLluOBOArfSdF0N+CP2wo/YFV |  | ||||||
| 9BGf89OtvRi3BXCm2NXkE/Sc4We26tY8x7xNLOmNs8YOT0O3r/EQ690W9GIwRMx0 |  | ||||||
| m0r/RXWn5V3o4Jib9r8eH9NzaDstD8g9dECcGfM4fHoM/DAGFaRrNcjMsS1APP3L |  | ||||||
| jp7+BfBSXtrz9V6rVJ3CBLXlLK0AuSm7bqd1MJsGA9uMLpsVZIUA+KawcmPGdPU+ |  | ||||||
| NxdpBCtzyurQSUyaTLtVqSeP35gMAwaNzUDph8Uh+vHz+kRwgXS19OQvTaud5LJu |  | ||||||
| nQe4JNS+u5e2VDEBWUxt8NTpu6eShDN0iIEHtxA= |  | ||||||
| -----END CERTIFICATE----- |  | ||||||
| @ -1,228 +0,0 @@ | |||||||
| from unittest.mock import MagicMock, patch |  | ||||||
| from urllib.parse import quote_plus |  | ||||||
|  |  | ||||||
| from django.urls import reverse |  | ||||||
| from guardian.shortcuts import assign_perm |  | ||||||
|  |  | ||||||
| from authentik.core.models import User |  | ||||||
| from authentik.core.tests.utils import ( |  | ||||||
|     create_test_brand, |  | ||||||
|     create_test_cert, |  | ||||||
|     create_test_flow, |  | ||||||
|     create_test_user, |  | ||||||
| ) |  | ||||||
| from authentik.crypto.models import CertificateKeyPair |  | ||||||
| from authentik.enterprise.stages.mtls.models import ( |  | ||||||
|     CertAttributes, |  | ||||||
|     MutualTLSStage, |  | ||||||
|     TLSMode, |  | ||||||
|     UserAttributes, |  | ||||||
| ) |  | ||||||
| from authentik.enterprise.stages.mtls.stage import PLAN_CONTEXT_CERTIFICATE |  | ||||||
| from authentik.flows.models import FlowDesignation, FlowStageBinding |  | ||||||
| from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER |  | ||||||
| from authentik.flows.tests import FlowTestCase |  | ||||||
| from authentik.lib.generators import generate_id |  | ||||||
| from authentik.lib.tests.utils import load_fixture |  | ||||||
| from authentik.outposts.models import Outpost, OutpostType |  | ||||||
| from authentik.stages.prompt.stage import PLAN_CONTEXT_PROMPT |  | ||||||
|  |  | ||||||
|  |  | ||||||
class MTLSStageTests(FlowTestCase):
    """Tests for the mTLS stage: header parsing for every supported reverse
    proxy, outpost permission checks, CA resolution (stage vs. brand), and
    required/optional mode behavior."""

    def setUp(self):
        super().setUp()
        self.flow = create_test_flow(FlowDesignation.AUTHENTICATION)
        # CA fixture that signed fixtures/cert_client.pem
        self.ca = CertificateKeyPair.objects.create(
            name=generate_id(),
            certificate_data=load_fixture("fixtures/ca.pem"),
        )
        self.stage = MutualTLSStage.objects.create(
            name=generate_id(),
            mode=TLSMode.REQUIRED,
            cert_attribute=CertAttributes.COMMON_NAME,
            user_attribute=UserAttributes.USERNAME,
        )

        self.stage.certificate_authorities.add(self.ca)
        self.binding = FlowStageBinding.objects.create(target=self.flow, stage=self.stage, order=0)
        self.client_cert = load_fixture("fixtures/cert_client.pem")
        # User matching the certificate (CN=client is mapped to username)
        User.objects.filter(username="client").delete()
        self.cert_user = create_test_user(username="client")

    def test_parse_xfcc(self):
        """Test authentik Proxy/Envoy's XFCC format"""
        with self.assertFlowFinishes() as plan:
            res = self.client.get(
                reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
                headers={"X-Forwarded-Client-Cert": f"Cert={quote_plus(self.client_cert)}"},
            )
            self.assertEqual(res.status_code, 200)
            self.assertStageRedirects(res, reverse("authentik_core:root-redirect"))
        self.assertEqual(plan().context[PLAN_CONTEXT_PENDING_USER], self.cert_user)

    def test_parse_nginx(self):
        """Test nginx's format"""
        with self.assertFlowFinishes() as plan:
            res = self.client.get(
                reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
                headers={"SSL-Client-Cert": quote_plus(self.client_cert)},
            )
            self.assertEqual(res.status_code, 200)
            self.assertStageRedirects(res, reverse("authentik_core:root-redirect"))
        self.assertEqual(plan().context[PLAN_CONTEXT_PENDING_USER], self.cert_user)

    def test_parse_traefik(self):
        """Test traefik's format"""
        with self.assertFlowFinishes() as plan:
            res = self.client.get(
                reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
                headers={"X-Forwarded-TLS-Client-Cert": quote_plus(self.client_cert)},
            )
            self.assertEqual(res.status_code, 200)
            self.assertStageRedirects(res, reverse("authentik_core:root-redirect"))
        self.assertEqual(plan().context[PLAN_CONTEXT_PENDING_USER], self.cert_user)

    def test_parse_outpost_object(self):
        """Test outposts's format with an object-level (per-stage) permission"""
        outpost = Outpost.objects.create(name=generate_id(), type=OutpostType.PROXY)
        assign_perm("pass_outpost_certificate", outpost.user, self.stage)
        with patch(
            "authentik.root.middleware.ClientIPMiddleware.get_outpost_user",
            MagicMock(return_value=outpost.user),
        ):
            with self.assertFlowFinishes() as plan:
                res = self.client.get(
                    reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
                    headers={"X-Authentik-Outpost-Certificate": quote_plus(self.client_cert)},
                )
                self.assertEqual(res.status_code, 200)
                self.assertStageRedirects(res, reverse("authentik_core:root-redirect"))
            self.assertEqual(plan().context[PLAN_CONTEXT_PENDING_USER], self.cert_user)

    def test_parse_outpost_global(self):
        """Test outposts's format with a globally assigned permission"""
        outpost = Outpost.objects.create(name=generate_id(), type=OutpostType.PROXY)
        assign_perm("authentik_stages_mtls.pass_outpost_certificate", outpost.user)
        with patch(
            "authentik.root.middleware.ClientIPMiddleware.get_outpost_user",
            MagicMock(return_value=outpost.user),
        ):
            with self.assertFlowFinishes() as plan:
                res = self.client.get(
                    reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
                    headers={"X-Authentik-Outpost-Certificate": quote_plus(self.client_cert)},
                )
                self.assertEqual(res.status_code, 200)
                self.assertStageRedirects(res, reverse("authentik_core:root-redirect"))
            self.assertEqual(plan().context[PLAN_CONTEXT_PENDING_USER], self.cert_user)

    def test_parse_outpost_no_perm(self):
        """Test outposts's format without permission: certificate is ignored"""
        outpost = Outpost.objects.create(name=generate_id(), type=OutpostType.PROXY)
        with patch(
            "authentik.root.middleware.ClientIPMiddleware.get_outpost_user",
            MagicMock(return_value=outpost.user),
        ):
            res = self.client.get(
                reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
                headers={"X-Authentik-Outpost-Certificate": quote_plus(self.client_cert)},
            )
            self.assertEqual(res.status_code, 200)
            self.assertStageResponse(res, self.flow, component="ak-stage-access-denied")

    def test_invalid_cert(self):
        """Test invalid certificate (not signed by the configured CA)"""
        cert = create_test_cert()
        with self.assertFlowFinishes() as plan:
            res = self.client.get(
                reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
                headers={"X-Forwarded-TLS-Client-Cert": quote_plus(cert.certificate_data)},
            )
            self.assertEqual(res.status_code, 200)
            self.assertStageResponse(res, self.flow, component="ak-stage-access-denied")
        self.assertNotIn(PLAN_CONTEXT_PENDING_USER, plan().context)

    def test_auth_no_user(self):
        """Test auth with no matching user: access denied"""
        User.objects.filter(username="client").delete()
        res = self.client.get(
            reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
            headers={"X-Forwarded-TLS-Client-Cert": quote_plus(self.client_cert)},
        )
        self.assertEqual(res.status_code, 200)
        self.assertStageResponse(res, self.flow, component="ak-stage-access-denied")

    def test_brand_ca(self):
        """Test using a CA from the brand (stage has none configured)"""
        self.stage.certificate_authorities.clear()

        brand = create_test_brand()
        brand.client_certificates.add(self.ca)
        with self.assertFlowFinishes() as plan:
            res = self.client.get(
                reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
                headers={"X-Forwarded-TLS-Client-Cert": quote_plus(self.client_cert)},
            )
            self.assertEqual(res.status_code, 200)
            self.assertStageRedirects(res, reverse("authentik_core:root-redirect"))
        self.assertEqual(plan().context[PLAN_CONTEXT_PENDING_USER], self.cert_user)

    def test_no_ca_optional(self):
        """Test using no CA set, optional mode: stage is skipped"""
        self.stage.mode = TLSMode.OPTIONAL
        self.stage.certificate_authorities.clear()
        self.stage.save()
        res = self.client.get(
            reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
            headers={"X-Forwarded-TLS-Client-Cert": quote_plus(self.client_cert)},
        )
        self.assertEqual(res.status_code, 200)
        self.assertStageRedirects(res, reverse("authentik_core:root-redirect"))

    def test_no_ca_required(self):
        """Test using no CA set, required mode: access denied"""
        self.stage.certificate_authorities.clear()
        self.stage.save()
        res = self.client.get(
            reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
            headers={"X-Forwarded-TLS-Client-Cert": quote_plus(self.client_cert)},
        )
        self.assertEqual(res.status_code, 200)
        self.assertStageResponse(res, self.flow, component="ak-stage-access-denied")

    def test_no_cert_optional(self):
        """Test using no cert given, optional mode: flow continues"""
        self.stage.mode = TLSMode.OPTIONAL
        self.stage.save()
        res = self.client.get(
            reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
        )
        self.assertEqual(res.status_code, 200)
        self.assertStageRedirects(res, reverse("authentik_core:root-redirect"))

    def test_enroll(self):
        """Test Enrollment flow: prompt data and certificate stored in context"""
        self.flow.designation = FlowDesignation.ENROLLMENT
        self.flow.save()
        with self.assertFlowFinishes() as plan:
            res = self.client.get(
                reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
                headers={"X-Forwarded-TLS-Client-Cert": quote_plus(self.client_cert)},
            )
            self.assertEqual(res.status_code, 200)
            self.assertStageRedirects(res, reverse("authentik_core:root-redirect"))
        self.assertEqual(plan().context[PLAN_CONTEXT_PROMPT], {"email": None, "name": "client"})
        # Expected values are derived from the fixtures/cert_client.pem fixture
        self.assertEqual(
            plan().context[PLAN_CONTEXT_CERTIFICATE],
            {
                "fingerprint_sha1": "52:39:ca:1e:3a:1f:78:3a:9f:26:3b:c2:84:99:48:68:99:99:81:8a",
                "fingerprint_sha256": (
                    "c1:07:8b:7c:e9:02:57:87:1e:92:e5:81:83:21:bc:92:c7:47:65:e3:97:fb:05:97:6f:36:9e:b5:31:77:98:b7"
                ),
                "issuer": "OU=Self-signed,O=authentik,CN=authentik Test CA",
                "serial_number": "70153443448884702681996102271549704759327537151",
                "subject": "CN=client",
            },
        )
| @ -1,5 +0,0 @@ | |||||||
"""API URLs"""

from authentik.enterprise.stages.mtls.api import MutualTLSStageViewSet

# (path prefix, ViewSet) pairs picked up by the API router.
api_urlpatterns = [("stages/mtls", MutualTLSStageViewSet)]
| @ -1,18 +0,0 @@ | |||||||
| # Generated by Django 5.1.9 on 2025-05-27 12:52 |  | ||||||
|  |  | ||||||
| from django.db import migrations, models |  | ||||||
|  |  | ||||||
|  |  | ||||||
class Migration(migrations.Migration):
    """Add FlowToken.revoke_on_execution (BooleanField, default True),
    controlling whether a flow token is revoked once its plan executes."""

    dependencies = [
        ("authentik_flows", "0027_auto_20231028_1424"),
    ]

    operations = [
        migrations.AddField(
            model_name="flowtoken",
            name="revoke_on_execution",
            field=models.BooleanField(default=True),
        ),
    ]
| @ -303,10 +303,9 @@ class FlowToken(Token): | |||||||
|  |  | ||||||
|     flow = models.ForeignKey(Flow, on_delete=models.CASCADE) |     flow = models.ForeignKey(Flow, on_delete=models.CASCADE) | ||||||
|     _plan = models.TextField() |     _plan = models.TextField() | ||||||
|     revoke_on_execution = models.BooleanField(default=True) |  | ||||||
|  |  | ||||||
|     @staticmethod |     @staticmethod | ||||||
|     def pickle(plan: "FlowPlan") -> str: |     def pickle(plan) -> str: | ||||||
|         """Pickle into string""" |         """Pickle into string""" | ||||||
|         data = dumps(plan) |         data = dumps(plan) | ||||||
|         return b64encode(data).decode() |         return b64encode(data).decode() | ||||||
|  | |||||||
| @ -99,10 +99,9 @@ class ChallengeStageView(StageView): | |||||||
|             self.logger.debug("Got StageInvalidException", exc=exc) |             self.logger.debug("Got StageInvalidException", exc=exc) | ||||||
|             return self.executor.stage_invalid() |             return self.executor.stage_invalid() | ||||||
|         if not challenge.is_valid(): |         if not challenge.is_valid(): | ||||||
|             self.logger.error( |             self.logger.warning( | ||||||
|                 "f(ch): Invalid challenge", |                 "f(ch): Invalid challenge", | ||||||
|                 errors=challenge.errors, |                 errors=challenge.errors, | ||||||
|                 challenge=challenge.data, |  | ||||||
|             ) |             ) | ||||||
|         return HttpChallengeResponse(challenge) |         return HttpChallengeResponse(challenge) | ||||||
|  |  | ||||||
|  | |||||||
| @ -1,10 +1,7 @@ | |||||||
| """Test helpers""" | """Test helpers""" | ||||||
|  |  | ||||||
| from collections.abc import Callable, Generator |  | ||||||
| from contextlib import contextmanager |  | ||||||
| from json import loads | from json import loads | ||||||
| from typing import Any | from typing import Any | ||||||
| from unittest.mock import MagicMock, patch |  | ||||||
|  |  | ||||||
| from django.http.response import HttpResponse | from django.http.response import HttpResponse | ||||||
| from django.urls.base import reverse | from django.urls.base import reverse | ||||||
| @ -12,8 +9,6 @@ from rest_framework.test import APITestCase | |||||||
|  |  | ||||||
| from authentik.core.models import User | from authentik.core.models import User | ||||||
| from authentik.flows.models import Flow | from authentik.flows.models import Flow | ||||||
| from authentik.flows.planner import FlowPlan |  | ||||||
| from authentik.flows.views.executor import SESSION_KEY_PLAN |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class FlowTestCase(APITestCase): | class FlowTestCase(APITestCase): | ||||||
| @ -49,12 +44,3 @@ class FlowTestCase(APITestCase): | |||||||
|     def assertStageRedirects(self, response: HttpResponse, to: str) -> dict[str, Any]: |     def assertStageRedirects(self, response: HttpResponse, to: str) -> dict[str, Any]: | ||||||
|         """Wrapper around assertStageResponse that checks for a redirect""" |         """Wrapper around assertStageResponse that checks for a redirect""" | ||||||
|         return self.assertStageResponse(response, component="xak-flow-redirect", to=to) |         return self.assertStageResponse(response, component="xak-flow-redirect", to=to) | ||||||
|  |  | ||||||
|     @contextmanager |  | ||||||
|     def assertFlowFinishes(self) -> Generator[Callable[[], FlowPlan]]: |  | ||||||
|         """Capture the flow plan before the flow finishes and return it""" |  | ||||||
|         try: |  | ||||||
|             with patch("authentik.flows.views.executor.FlowExecutorView.cancel", MagicMock()): |  | ||||||
|                 yield lambda: self.client.session.get(SESSION_KEY_PLAN) |  | ||||||
|         finally: |  | ||||||
|             pass |  | ||||||
|  | |||||||
| @ -146,8 +146,7 @@ class FlowExecutorView(APIView): | |||||||
|         except (AttributeError, EOFError, ImportError, IndexError) as exc: |         except (AttributeError, EOFError, ImportError, IndexError) as exc: | ||||||
|             LOGGER.warning("f(exec): Failed to restore token plan", exc=exc) |             LOGGER.warning("f(exec): Failed to restore token plan", exc=exc) | ||||||
|         finally: |         finally: | ||||||
|             if token.revoke_on_execution: |             token.delete() | ||||||
|                 token.delete() |  | ||||||
|         if not isinstance(plan, FlowPlan): |         if not isinstance(plan, FlowPlan): | ||||||
|             return None |             return None | ||||||
|         plan.context[PLAN_CONTEXT_IS_RESTORED] = token |         plan.context[PLAN_CONTEXT_IS_RESTORED] = token | ||||||
|  | |||||||
| @ -81,6 +81,7 @@ debugger: false | |||||||
|  |  | ||||||
| log_level: info | log_level: info | ||||||
|  |  | ||||||
|  | session_storage: cache | ||||||
| sessions: | sessions: | ||||||
|   unauthenticated_age: days=1 |   unauthenticated_age: days=1 | ||||||
|  |  | ||||||
|  | |||||||
| @ -23,6 +23,7 @@ if TYPE_CHECKING: | |||||||
|  |  | ||||||
|  |  | ||||||
| class Direction(StrEnum): | class Direction(StrEnum): | ||||||
|  |  | ||||||
|     add = "add" |     add = "add" | ||||||
|     remove = "remove" |     remove = "remove" | ||||||
|  |  | ||||||
| @ -36,16 +37,13 @@ SAFE_METHODS = [ | |||||||
|  |  | ||||||
|  |  | ||||||
| class BaseOutgoingSyncClient[ | class BaseOutgoingSyncClient[ | ||||||
|     TModel: "Model", |     TModel: "Model", TConnection: "Model", TSchema: dict, TProvider: "OutgoingSyncProvider" | ||||||
|     TConnection: "Model", |  | ||||||
|     TSchema: dict, |  | ||||||
|     TProvider: "OutgoingSyncProvider", |  | ||||||
| ]: | ]: | ||||||
|     """Basic Outgoing sync client Client""" |     """Basic Outgoing sync client Client""" | ||||||
|  |  | ||||||
|     provider: TProvider |     provider: TProvider | ||||||
|     connection_type: type[TConnection] |     connection_type: type[TConnection] | ||||||
|     connection_attr: str |     connection_type_query: str | ||||||
|     mapper: PropertyMappingManager |     mapper: PropertyMappingManager | ||||||
|  |  | ||||||
|     can_discover = False |     can_discover = False | ||||||
| @ -65,7 +63,9 @@ class BaseOutgoingSyncClient[ | |||||||
|     def write(self, obj: TModel) -> tuple[TConnection, bool]: |     def write(self, obj: TModel) -> tuple[TConnection, bool]: | ||||||
|         """Write object to destination. Uses self.create and self.update, but |         """Write object to destination. Uses self.create and self.update, but | ||||||
|         can be overwritten for further logic""" |         can be overwritten for further logic""" | ||||||
|         connection = getattr(obj, self.connection_attr).filter(provider=self.provider).first() |         connection = self.connection_type.objects.filter( | ||||||
|  |             provider=self.provider, **{self.connection_type_query: obj} | ||||||
|  |         ).first() | ||||||
|         try: |         try: | ||||||
|             if not connection: |             if not connection: | ||||||
|                 connection = self.create(obj) |                 connection = self.create(obj) | ||||||
|  | |||||||
| @ -1,7 +1,6 @@ | |||||||
| from collections.abc import Callable | from collections.abc import Callable | ||||||
| from dataclasses import asdict | from dataclasses import asdict | ||||||
|  |  | ||||||
| from celery import group |  | ||||||
| from celery.exceptions import Retry | from celery.exceptions import Retry | ||||||
| from celery.result import allow_join_result | from celery.result import allow_join_result | ||||||
| from django.core.paginator import Paginator | from django.core.paginator import Paginator | ||||||
| @ -83,41 +82,21 @@ class SyncTasks: | |||||||
|                 self.logger.debug("Failed to acquire sync lock, skipping", provider=provider.name) |                 self.logger.debug("Failed to acquire sync lock, skipping", provider=provider.name) | ||||||
|                 return |                 return | ||||||
|             try: |             try: | ||||||
|                 messages.append(_("Syncing users")) |                 for page in users_paginator.page_range: | ||||||
|                 user_results = ( |                     messages.append(_("Syncing page {page} of users".format(page=page))) | ||||||
|                     group( |                     for msg in sync_objects.apply_async( | ||||||
|                         [ |                         args=(class_to_path(User), page, provider_pk), | ||||||
|                             sync_objects.signature( |                         time_limit=PAGE_TIMEOUT, | ||||||
|                                 args=(class_to_path(User), page, provider_pk), |                         soft_time_limit=PAGE_TIMEOUT, | ||||||
|                                 time_limit=PAGE_TIMEOUT, |                     ).get(): | ||||||
|                                 soft_time_limit=PAGE_TIMEOUT, |  | ||||||
|                             ) |  | ||||||
|                             for page in users_paginator.page_range |  | ||||||
|                         ] |  | ||||||
|                     ) |  | ||||||
|                     .apply_async() |  | ||||||
|                     .get() |  | ||||||
|                 ) |  | ||||||
|                 for result in user_results: |  | ||||||
|                     for msg in result: |  | ||||||
|                         messages.append(LogEvent(**msg)) |                         messages.append(LogEvent(**msg)) | ||||||
|                 messages.append(_("Syncing groups")) |                 for page in groups_paginator.page_range: | ||||||
|                 group_results = ( |                     messages.append(_("Syncing page {page} of groups".format(page=page))) | ||||||
|                     group( |                     for msg in sync_objects.apply_async( | ||||||
|                         [ |                         args=(class_to_path(Group), page, provider_pk), | ||||||
|                             sync_objects.signature( |                         time_limit=PAGE_TIMEOUT, | ||||||
|                                 args=(class_to_path(Group), page, provider_pk), |                         soft_time_limit=PAGE_TIMEOUT, | ||||||
|                                 time_limit=PAGE_TIMEOUT, |                     ).get(): | ||||||
|                                 soft_time_limit=PAGE_TIMEOUT, |  | ||||||
|                             ) |  | ||||||
|                             for page in groups_paginator.page_range |  | ||||||
|                         ] |  | ||||||
|                     ) |  | ||||||
|                     .apply_async() |  | ||||||
|                     .get() |  | ||||||
|                 ) |  | ||||||
|                 for result in group_results: |  | ||||||
|                     for msg in result: |  | ||||||
|                         messages.append(LogEvent(**msg)) |                         messages.append(LogEvent(**msg)) | ||||||
|             except TransientSyncException as exc: |             except TransientSyncException as exc: | ||||||
|                 self.logger.warning("transient sync exception", exc=exc) |                 self.logger.warning("transient sync exception", exc=exc) | ||||||
| @ -130,7 +109,7 @@ class SyncTasks: | |||||||
|     def sync_objects( |     def sync_objects( | ||||||
|         self, object_type: str, page: int, provider_pk: int, override_dry_run=False, **filter |         self, object_type: str, page: int, provider_pk: int, override_dry_run=False, **filter | ||||||
|     ): |     ): | ||||||
|         _object_type: type[Model] = path_to_class(object_type) |         _object_type = path_to_class(object_type) | ||||||
|         self.logger = get_logger().bind( |         self.logger = get_logger().bind( | ||||||
|             provider_type=class_to_path(self._provider_model), |             provider_type=class_to_path(self._provider_model), | ||||||
|             provider_pk=provider_pk, |             provider_pk=provider_pk, | ||||||
| @ -153,19 +132,6 @@ class SyncTasks: | |||||||
|             self.logger.debug("starting discover") |             self.logger.debug("starting discover") | ||||||
|             client.discover() |             client.discover() | ||||||
|         self.logger.debug("starting sync for page", page=page) |         self.logger.debug("starting sync for page", page=page) | ||||||
|         messages.append( |  | ||||||
|             asdict( |  | ||||||
|                 LogEvent( |  | ||||||
|                     _( |  | ||||||
|                         "Syncing page {page} of {object_type}".format( |  | ||||||
|                             page=page, object_type=_object_type._meta.verbose_name_plural |  | ||||||
|                         ) |  | ||||||
|                     ), |  | ||||||
|                     log_level="info", |  | ||||||
|                     logger=f"{provider._meta.verbose_name}@{object_type}", |  | ||||||
|                 ) |  | ||||||
|             ) |  | ||||||
|         ) |  | ||||||
|         for obj in paginator.page(page).object_list: |         for obj in paginator.page(page).object_list: | ||||||
|             obj: Model |             obj: Model | ||||||
|             try: |             try: | ||||||
|  | |||||||
| @ -1,11 +1,9 @@ | |||||||
| """Websocket tests""" | """Websocket tests""" | ||||||
|  |  | ||||||
| from dataclasses import asdict | from dataclasses import asdict | ||||||
| from unittest.mock import patch |  | ||||||
|  |  | ||||||
| from channels.routing import URLRouter | from channels.routing import URLRouter | ||||||
| from channels.testing import WebsocketCommunicator | from channels.testing import WebsocketCommunicator | ||||||
| from django.contrib.contenttypes.models import ContentType |  | ||||||
| from django.test import TransactionTestCase | from django.test import TransactionTestCase | ||||||
|  |  | ||||||
| from authentik import __version__ | from authentik import __version__ | ||||||
| @ -16,12 +14,6 @@ from authentik.providers.proxy.models import ProxyProvider | |||||||
| from authentik.root import websocket | from authentik.root import websocket | ||||||
|  |  | ||||||
|  |  | ||||||
| def patched__get_ct_cached(app_label, codename): |  | ||||||
|     """Caches `ContentType` instances like its `QuerySet` does.""" |  | ||||||
|     return ContentType.objects.get(app_label=app_label, permission__codename=codename) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| @patch("guardian.shortcuts._get_ct_cached", patched__get_ct_cached) |  | ||||||
| class TestOutpostWS(TransactionTestCase): | class TestOutpostWS(TransactionTestCase): | ||||||
|     """Websocket tests""" |     """Websocket tests""" | ||||||
|  |  | ||||||
| @ -46,7 +38,6 @@ class TestOutpostWS(TransactionTestCase): | |||||||
|         ) |         ) | ||||||
|         connected, _ = await communicator.connect() |         connected, _ = await communicator.connect() | ||||||
|         self.assertFalse(connected) |         self.assertFalse(connected) | ||||||
|         await communicator.disconnect() |  | ||||||
|  |  | ||||||
|     async def test_auth_valid(self): |     async def test_auth_valid(self): | ||||||
|         """Test auth with token""" |         """Test auth with token""" | ||||||
| @ -57,7 +48,6 @@ class TestOutpostWS(TransactionTestCase): | |||||||
|         ) |         ) | ||||||
|         connected, _ = await communicator.connect() |         connected, _ = await communicator.connect() | ||||||
|         self.assertTrue(connected) |         self.assertTrue(connected) | ||||||
|         await communicator.disconnect() |  | ||||||
|  |  | ||||||
|     async def test_send(self): |     async def test_send(self): | ||||||
|         """Test sending of Hello""" |         """Test sending of Hello""" | ||||||
|  | |||||||
| @ -7,8 +7,10 @@ from django.db import migrations | |||||||
|  |  | ||||||
|  |  | ||||||
| def migrate_search_group(apps: Apps, schema_editor: BaseDatabaseSchemaEditor): | def migrate_search_group(apps: Apps, schema_editor: BaseDatabaseSchemaEditor): | ||||||
|  |     from authentik.core.models import User | ||||||
|     from django.apps import apps as real_apps |     from django.apps import apps as real_apps | ||||||
|     from django.contrib.auth.management import create_permissions |     from django.contrib.auth.management import create_permissions | ||||||
|  |     from guardian.shortcuts import UserObjectPermission | ||||||
|  |  | ||||||
|     db_alias = schema_editor.connection.alias |     db_alias = schema_editor.connection.alias | ||||||
|  |  | ||||||
|  | |||||||
| @ -50,4 +50,3 @@ AMR_PASSWORD = "pwd"  # nosec | |||||||
| AMR_MFA = "mfa" | AMR_MFA = "mfa" | ||||||
| AMR_OTP = "otp" | AMR_OTP = "otp" | ||||||
| AMR_WEBAUTHN = "user" | AMR_WEBAUTHN = "user" | ||||||
| AMR_SMART_CARD = "sc" |  | ||||||
|  | |||||||
| @ -16,7 +16,6 @@ from authentik.providers.oauth2.constants import ( | |||||||
|     ACR_AUTHENTIK_DEFAULT, |     ACR_AUTHENTIK_DEFAULT, | ||||||
|     AMR_MFA, |     AMR_MFA, | ||||||
|     AMR_PASSWORD, |     AMR_PASSWORD, | ||||||
|     AMR_SMART_CARD, |  | ||||||
|     AMR_WEBAUTHN, |     AMR_WEBAUTHN, | ||||||
| ) | ) | ||||||
| from authentik.stages.password.stage import PLAN_CONTEXT_METHOD, PLAN_CONTEXT_METHOD_ARGS | from authentik.stages.password.stage import PLAN_CONTEXT_METHOD, PLAN_CONTEXT_METHOD_ARGS | ||||||
| @ -140,10 +139,9 @@ class IDToken: | |||||||
|                 amr.append(AMR_PASSWORD) |                 amr.append(AMR_PASSWORD) | ||||||
|             if method == "auth_webauthn_pwl": |             if method == "auth_webauthn_pwl": | ||||||
|                 amr.append(AMR_WEBAUTHN) |                 amr.append(AMR_WEBAUTHN) | ||||||
|             if "certificate" in method_args: |  | ||||||
|                 amr.append(AMR_SMART_CARD) |  | ||||||
|             if "mfa_devices" in method_args: |             if "mfa_devices" in method_args: | ||||||
|                 amr.append(AMR_MFA) |                 if len(amr) > 0: | ||||||
|  |                     amr.append(AMR_MFA) | ||||||
|             if amr: |             if amr: | ||||||
|                 id_token.amr = amr |                 id_token.amr = amr | ||||||
|  |  | ||||||
|  | |||||||
| @ -6,7 +6,6 @@ from django.urls import include, path | |||||||
| from authentik.providers.oauth2.views.authorize import AuthorizationFlowInitView | from authentik.providers.oauth2.views.authorize import AuthorizationFlowInitView | ||||||
| from authentik.providers.oauth2.views.device_init import DeviceEntryView | from authentik.providers.oauth2.views.device_init import DeviceEntryView | ||||||
| from authentik.providers.oauth2.views.github import GitHubUserTeamsView, GitHubUserView | from authentik.providers.oauth2.views.github import GitHubUserTeamsView, GitHubUserView | ||||||
| from authentik.providers.oauth2.views.provider import ProviderInfoView |  | ||||||
| from authentik.providers.oauth2.views.token import TokenView | from authentik.providers.oauth2.views.token import TokenView | ||||||
|  |  | ||||||
| github_urlpatterns = [ | github_urlpatterns = [ | ||||||
| @ -41,9 +40,4 @@ urlpatterns = [ | |||||||
|         ), |         ), | ||||||
|         name="device-login", |         name="device-login", | ||||||
|     ), |     ), | ||||||
|     path( |  | ||||||
|         ".well-known/oauth-authorization-server/application/o/<slug:application_slug>/", |  | ||||||
|         ProviderInfoView.as_view(), |  | ||||||
|         name="providers-oauth2-authorization-server-metadata", |  | ||||||
|     ), |  | ||||||
| ] | ] | ||||||
|  | |||||||
| @ -47,8 +47,6 @@ class IngressReconciler(KubernetesObjectReconciler[V1Ingress]): | |||||||
|     def reconcile(self, current: V1Ingress, reference: V1Ingress): |     def reconcile(self, current: V1Ingress, reference: V1Ingress): | ||||||
|         super().reconcile(current, reference) |         super().reconcile(current, reference) | ||||||
|         self._check_annotations(current, reference) |         self._check_annotations(current, reference) | ||||||
|         if current.spec.ingress_class_name != reference.spec.ingress_class_name: |  | ||||||
|             raise NeedsUpdate() |  | ||||||
|         # Create a list of all expected host and tls hosts |         # Create a list of all expected host and tls hosts | ||||||
|         expected_hosts = [] |         expected_hosts = [] | ||||||
|         expected_hosts_tls = [] |         expected_hosts_tls = [] | ||||||
|  | |||||||
| @ -166,6 +166,7 @@ class ConnectionToken(ExpiringModel): | |||||||
|         always_merger.merge(settings, default_settings) |         always_merger.merge(settings, default_settings) | ||||||
|         always_merger.merge(settings, self.endpoint.provider.settings) |         always_merger.merge(settings, self.endpoint.provider.settings) | ||||||
|         always_merger.merge(settings, self.endpoint.settings) |         always_merger.merge(settings, self.endpoint.settings) | ||||||
|  |         always_merger.merge(settings, self.settings) | ||||||
|  |  | ||||||
|         def mapping_evaluator(mappings: QuerySet): |         def mapping_evaluator(mappings: QuerySet): | ||||||
|             for mapping in mappings: |             for mapping in mappings: | ||||||
| @ -190,7 +191,6 @@ class ConnectionToken(ExpiringModel): | |||||||
|         mapping_evaluator( |         mapping_evaluator( | ||||||
|             RACPropertyMapping.objects.filter(endpoint__in=[self.endpoint]).order_by("name") |             RACPropertyMapping.objects.filter(endpoint__in=[self.endpoint]).order_by("name") | ||||||
|         ) |         ) | ||||||
|         always_merger.merge(settings, self.settings) |  | ||||||
|  |  | ||||||
|         settings["drive-path"] = f"/tmp/connection/{self.token}"  # nosec |         settings["drive-path"] = f"/tmp/connection/{self.token}"  # nosec | ||||||
|         settings["create-drive-path"] = "true" |         settings["create-drive-path"] = "true" | ||||||
|  | |||||||
| @ -90,6 +90,23 @@ class TestModels(TransactionTestCase): | |||||||
|                 "resize-method": "display-update", |                 "resize-method": "display-update", | ||||||
|             }, |             }, | ||||||
|         ) |         ) | ||||||
|  |         # Set settings in token | ||||||
|  |         token.settings = { | ||||||
|  |             "level": "token", | ||||||
|  |         } | ||||||
|  |         token.save() | ||||||
|  |         self.assertEqual( | ||||||
|  |             token.get_settings(), | ||||||
|  |             { | ||||||
|  |                 "hostname": self.endpoint.host.split(":")[0], | ||||||
|  |                 "port": "1324", | ||||||
|  |                 "client-name": f"authentik - {self.user}", | ||||||
|  |                 "drive-path": path, | ||||||
|  |                 "create-drive-path": "true", | ||||||
|  |                 "level": "token", | ||||||
|  |                 "resize-method": "display-update", | ||||||
|  |             }, | ||||||
|  |         ) | ||||||
|         # Set settings in property mapping (provider) |         # Set settings in property mapping (provider) | ||||||
|         mapping = RACPropertyMapping.objects.create( |         mapping = RACPropertyMapping.objects.create( | ||||||
|             name=generate_id(), |             name=generate_id(), | ||||||
| @ -134,22 +151,3 @@ class TestModels(TransactionTestCase): | |||||||
|                 "resize-method": "display-update", |                 "resize-method": "display-update", | ||||||
|             }, |             }, | ||||||
|         ) |         ) | ||||||
|         # Set settings in token |  | ||||||
|         token.settings = { |  | ||||||
|             "level": "token", |  | ||||||
|         } |  | ||||||
|         token.save() |  | ||||||
|         self.assertEqual( |  | ||||||
|             token.get_settings(), |  | ||||||
|             { |  | ||||||
|                 "hostname": self.endpoint.host.split(":")[0], |  | ||||||
|                 "port": "1324", |  | ||||||
|                 "client-name": f"authentik - {self.user}", |  | ||||||
|                 "drive-path": path, |  | ||||||
|                 "create-drive-path": "true", |  | ||||||
|                 "foo": "true", |  | ||||||
|                 "bar": "6", |  | ||||||
|                 "resize-method": "display-update", |  | ||||||
|                 "level": "token", |  | ||||||
|             }, |  | ||||||
|         ) |  | ||||||
|  | |||||||
| @ -34,7 +34,7 @@ class SCIMGroupClient(SCIMClient[Group, SCIMProviderGroup, SCIMGroupSchema]): | |||||||
|     """SCIM client for groups""" |     """SCIM client for groups""" | ||||||
|  |  | ||||||
|     connection_type = SCIMProviderGroup |     connection_type = SCIMProviderGroup | ||||||
|     connection_attr = "scimprovidergroup_set" |     connection_type_query = "group" | ||||||
|     mapper: PropertyMappingManager |     mapper: PropertyMappingManager | ||||||
|  |  | ||||||
|     def __init__(self, provider: SCIMProvider): |     def __init__(self, provider: SCIMProvider): | ||||||
| @ -47,16 +47,15 @@ class SCIMGroupClient(SCIMClient[Group, SCIMProviderGroup, SCIMGroupSchema]): | |||||||
|  |  | ||||||
|     def to_schema(self, obj: Group, connection: SCIMProviderGroup) -> SCIMGroupSchema: |     def to_schema(self, obj: Group, connection: SCIMProviderGroup) -> SCIMGroupSchema: | ||||||
|         """Convert authentik user into SCIM""" |         """Convert authentik user into SCIM""" | ||||||
|         raw_scim_group = super().to_schema(obj, connection) |         raw_scim_group = super().to_schema( | ||||||
|  |             obj, | ||||||
|  |             connection, | ||||||
|  |             schemas=(SCIM_GROUP_SCHEMA,), | ||||||
|  |         ) | ||||||
|         try: |         try: | ||||||
|             scim_group = SCIMGroupSchema.model_validate(delete_none_values(raw_scim_group)) |             scim_group = SCIMGroupSchema.model_validate(delete_none_values(raw_scim_group)) | ||||||
|         except ValidationError as exc: |         except ValidationError as exc: | ||||||
|             raise StopSync(exc, obj) from exc |             raise StopSync(exc, obj) from exc | ||||||
|         if SCIM_GROUP_SCHEMA not in scim_group.schemas: |  | ||||||
|             scim_group.schemas.insert(0, SCIM_GROUP_SCHEMA) |  | ||||||
|         # As this might be unset, we need to tell pydantic it's set so ensure the schemas |  | ||||||
|         # are included, even if its just the defaults |  | ||||||
|         scim_group.schemas = list(scim_group.schemas) |  | ||||||
|         if not scim_group.externalId: |         if not scim_group.externalId: | ||||||
|             scim_group.externalId = str(obj.pk) |             scim_group.externalId = str(obj.pk) | ||||||
|  |  | ||||||
|  | |||||||
| @ -18,7 +18,7 @@ class SCIMUserClient(SCIMClient[User, SCIMProviderUser, SCIMUserSchema]): | |||||||
|     """SCIM client for users""" |     """SCIM client for users""" | ||||||
|  |  | ||||||
|     connection_type = SCIMProviderUser |     connection_type = SCIMProviderUser | ||||||
|     connection_attr = "scimprovideruser_set" |     connection_type_query = "user" | ||||||
|     mapper: PropertyMappingManager |     mapper: PropertyMappingManager | ||||||
|  |  | ||||||
|     def __init__(self, provider: SCIMProvider): |     def __init__(self, provider: SCIMProvider): | ||||||
| @ -31,16 +31,15 @@ class SCIMUserClient(SCIMClient[User, SCIMProviderUser, SCIMUserSchema]): | |||||||
|  |  | ||||||
|     def to_schema(self, obj: User, connection: SCIMProviderUser) -> SCIMUserSchema: |     def to_schema(self, obj: User, connection: SCIMProviderUser) -> SCIMUserSchema: | ||||||
|         """Convert authentik user into SCIM""" |         """Convert authentik user into SCIM""" | ||||||
|         raw_scim_user = super().to_schema(obj, connection) |         raw_scim_user = super().to_schema( | ||||||
|  |             obj, | ||||||
|  |             connection, | ||||||
|  |             schemas=(SCIM_USER_SCHEMA,), | ||||||
|  |         ) | ||||||
|         try: |         try: | ||||||
|             scim_user = SCIMUserSchema.model_validate(delete_none_values(raw_scim_user)) |             scim_user = SCIMUserSchema.model_validate(delete_none_values(raw_scim_user)) | ||||||
|         except ValidationError as exc: |         except ValidationError as exc: | ||||||
|             raise StopSync(exc, obj) from exc |             raise StopSync(exc, obj) from exc | ||||||
|         if SCIM_USER_SCHEMA not in scim_user.schemas: |  | ||||||
|             scim_user.schemas.insert(0, SCIM_USER_SCHEMA) |  | ||||||
|         # As this might be unset, we need to tell pydantic it's set so ensure the schemas |  | ||||||
|         # are included, even if its just the defaults |  | ||||||
|         scim_user.schemas = list(scim_user.schemas) |  | ||||||
|         if not scim_user.externalId: |         if not scim_user.externalId: | ||||||
|             scim_user.externalId = str(obj.uid) |             scim_user.externalId = str(obj.uid) | ||||||
|         return scim_user |         return scim_user | ||||||
|  | |||||||
| @ -116,7 +116,7 @@ class SCIMProvider(OutgoingSyncProvider, BackchannelProvider): | |||||||
|         if type == User: |         if type == User: | ||||||
|             # Get queryset of all users with consistent ordering |             # Get queryset of all users with consistent ordering | ||||||
|             # according to the provider's settings |             # according to the provider's settings | ||||||
|             base = User.objects.prefetch_related("scimprovideruser_set").all().exclude_anonymous() |             base = User.objects.all().exclude_anonymous() | ||||||
|             if self.exclude_users_service_account: |             if self.exclude_users_service_account: | ||||||
|                 base = base.exclude(type=UserTypes.SERVICE_ACCOUNT).exclude( |                 base = base.exclude(type=UserTypes.SERVICE_ACCOUNT).exclude( | ||||||
|                     type=UserTypes.INTERNAL_SERVICE_ACCOUNT |                     type=UserTypes.INTERNAL_SERVICE_ACCOUNT | ||||||
| @ -126,7 +126,7 @@ class SCIMProvider(OutgoingSyncProvider, BackchannelProvider): | |||||||
|             return base.order_by("pk") |             return base.order_by("pk") | ||||||
|         if type == Group: |         if type == Group: | ||||||
|             # Get queryset of all groups with consistent ordering |             # Get queryset of all groups with consistent ordering | ||||||
|             return Group.objects.prefetch_related("scimprovidergroup_set").all().order_by("pk") |             return Group.objects.all().order_by("pk") | ||||||
|         raise ValueError(f"Invalid type {type}") |         raise ValueError(f"Invalid type {type}") | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|  | |||||||
| @ -91,57 +91,6 @@ class SCIMUserTests(TestCase): | |||||||
|             }, |             }, | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|     @Mocker() |  | ||||||
|     def test_user_create_custom_schema(self, mock: Mocker): |  | ||||||
|         """Test user creation with custom schema""" |  | ||||||
|         schema = SCIMMapping.objects.create( |  | ||||||
|             name="custom_schema", |  | ||||||
|             expression="""return {"schemas": ["foo"]}""", |  | ||||||
|         ) |  | ||||||
|         self.provider.property_mappings.add(schema) |  | ||||||
|         scim_id = generate_id() |  | ||||||
|         mock.get( |  | ||||||
|             "https://localhost/ServiceProviderConfig", |  | ||||||
|             json={}, |  | ||||||
|         ) |  | ||||||
|         mock.post( |  | ||||||
|             "https://localhost/Users", |  | ||||||
|             json={ |  | ||||||
|                 "id": scim_id, |  | ||||||
|             }, |  | ||||||
|         ) |  | ||||||
|         uid = generate_id() |  | ||||||
|         user = User.objects.create( |  | ||||||
|             username=uid, |  | ||||||
|             name=f"{uid} {uid}", |  | ||||||
|             email=f"{uid}@goauthentik.io", |  | ||||||
|         ) |  | ||||||
|         self.assertEqual(mock.call_count, 2) |  | ||||||
|         self.assertEqual(mock.request_history[0].method, "GET") |  | ||||||
|         self.assertEqual(mock.request_history[1].method, "POST") |  | ||||||
|         self.assertJSONEqual( |  | ||||||
|             mock.request_history[1].body, |  | ||||||
|             { |  | ||||||
|                 "schemas": ["urn:ietf:params:scim:schemas:core:2.0:User", "foo"], |  | ||||||
|                 "active": True, |  | ||||||
|                 "emails": [ |  | ||||||
|                     { |  | ||||||
|                         "primary": True, |  | ||||||
|                         "type": "other", |  | ||||||
|                         "value": f"{uid}@goauthentik.io", |  | ||||||
|                     } |  | ||||||
|                 ], |  | ||||||
|                 "externalId": user.uid, |  | ||||||
|                 "name": { |  | ||||||
|                     "familyName": uid, |  | ||||||
|                     "formatted": f"{uid} {uid}", |  | ||||||
|                     "givenName": uid, |  | ||||||
|                 }, |  | ||||||
|                 "displayName": f"{uid} {uid}", |  | ||||||
|                 "userName": uid, |  | ||||||
|             }, |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     @Mocker() |     @Mocker() | ||||||
|     def test_user_create_different_provider_same_id(self, mock: Mocker): |     def test_user_create_different_provider_same_id(self, mock: Mocker): | ||||||
|         """Test user creation with multiple providers that happen |         """Test user creation with multiple providers that happen | ||||||
| @ -435,7 +384,7 @@ class SCIMUserTests(TestCase): | |||||||
|                 self.assertIn(request.method, SAFE_METHODS) |                 self.assertIn(request.method, SAFE_METHODS) | ||||||
|         task = SystemTask.objects.filter(uid=slugify(self.provider.name)).first() |         task = SystemTask.objects.filter(uid=slugify(self.provider.name)).first() | ||||||
|         self.assertIsNotNone(task) |         self.assertIsNotNone(task) | ||||||
|         drop_msg = task.messages[3] |         drop_msg = task.messages[2] | ||||||
|         self.assertEqual(drop_msg["event"], "Dropping mutating request due to dry run") |         self.assertEqual(drop_msg["event"], "Dropping mutating request due to dry run") | ||||||
|         self.assertIsNotNone(drop_msg["attributes"]["url"]) |         self.assertIsNotNone(drop_msg["attributes"]["url"]) | ||||||
|         self.assertIsNotNone(drop_msg["attributes"]["body"]) |         self.assertIsNotNone(drop_msg["attributes"]["body"]) | ||||||
|  | |||||||
| @ -132,7 +132,7 @@ TENANT_CREATION_FAKES_MIGRATIONS = True | |||||||
| TENANT_BASE_SCHEMA = "template" | TENANT_BASE_SCHEMA = "template" | ||||||
| PUBLIC_SCHEMA_NAME = CONFIG.get("postgresql.default_schema") | PUBLIC_SCHEMA_NAME = CONFIG.get("postgresql.default_schema") | ||||||
|  |  | ||||||
| GUARDIAN_MONKEY_PATCH_USER = False | GUARDIAN_MONKEY_PATCH = False | ||||||
|  |  | ||||||
| SPECTACULAR_SETTINGS = { | SPECTACULAR_SETTINGS = { | ||||||
|     "TITLE": "authentik", |     "TITLE": "authentik", | ||||||
| @ -424,7 +424,7 @@ else: | |||||||
|         "BACKEND": "authentik.root.storages.FileStorage", |         "BACKEND": "authentik.root.storages.FileStorage", | ||||||
|         "OPTIONS": { |         "OPTIONS": { | ||||||
|             "location": Path(CONFIG.get("storage.media.file.path")), |             "location": Path(CONFIG.get("storage.media.file.path")), | ||||||
|             "base_url": CONFIG.get("web.path", "/") + "media/", |             "base_url": "/media/", | ||||||
|         }, |         }, | ||||||
|     } |     } | ||||||
|     # Compatibility for apps not supporting top-level STORAGES |     # Compatibility for apps not supporting top-level STORAGES | ||||||
|  | |||||||
| @ -11,7 +11,7 @@ from django.test.runner import DiscoverRunner | |||||||
| from authentik.lib.config import CONFIG | from authentik.lib.config import CONFIG | ||||||
| from authentik.lib.sentry import sentry_init | from authentik.lib.sentry import sentry_init | ||||||
| from authentik.root.signals import post_startup, pre_startup, startup | from authentik.root.signals import post_startup, pre_startup, startup | ||||||
| from tests.e2e.utils import get_docker_tag | from tests.docker import get_docker_tag | ||||||
|  |  | ||||||
| # globally set maxDiff to none to show full assert error | # globally set maxDiff to none to show full assert error | ||||||
| TestCase.maxDiff = None | TestCase.maxDiff = None | ||||||
| @ -31,8 +31,6 @@ class PytestTestRunner(DiscoverRunner):  # pragma: no cover | |||||||
|  |  | ||||||
|         if kwargs.get("randomly_seed", None): |         if kwargs.get("randomly_seed", None): | ||||||
|             self.args.append(f"--randomly-seed={kwargs['randomly_seed']}") |             self.args.append(f"--randomly-seed={kwargs['randomly_seed']}") | ||||||
|         if kwargs.get("no_capture", False): |  | ||||||
|             self.args.append("--capture=no") |  | ||||||
|  |  | ||||||
|         settings.TEST = True |         settings.TEST = True | ||||||
|         settings.CELERY["task_always_eager"] = True |         settings.CELERY["task_always_eager"] = True | ||||||
| @ -66,11 +64,6 @@ class PytestTestRunner(DiscoverRunner):  # pragma: no cover | |||||||
|             "Default behaviour: use random.Random().getrandbits(32), so the seed is" |             "Default behaviour: use random.Random().getrandbits(32), so the seed is" | ||||||
|             "different on each run.", |             "different on each run.", | ||||||
|         ) |         ) | ||||||
|         parser.add_argument( |  | ||||||
|             "--no-capture", |  | ||||||
|             action="store_true", |  | ||||||
|             help="Disable any capturing of stdout/stderr during tests.", |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     def run_tests(self, test_labels, extra_tests=None, **kwargs): |     def run_tests(self, test_labels, extra_tests=None, **kwargs): | ||||||
|         """Run pytest and return the exitcode. |         """Run pytest and return the exitcode. | ||||||
|  | |||||||
| @ -317,7 +317,7 @@ class KerberosSource(Source): | |||||||
|                 usage="accept", name=name, store=self.get_gssapi_store() |                 usage="accept", name=name, store=self.get_gssapi_store() | ||||||
|             ) |             ) | ||||||
|         except gssapi.exceptions.GSSError as exc: |         except gssapi.exceptions.GSSError as exc: | ||||||
|             LOGGER.warning("GSSAPI credentials failure", exc=exc) |             LOGGER.warn("GSSAPI credentials failure", exc=exc) | ||||||
|             return None |             return None | ||||||
|  |  | ||||||
|  |  | ||||||
|  | |||||||
| @ -103,7 +103,6 @@ class LDAPSourceSerializer(SourceSerializer): | |||||||
|             "user_object_filter", |             "user_object_filter", | ||||||
|             "group_object_filter", |             "group_object_filter", | ||||||
|             "group_membership_field", |             "group_membership_field", | ||||||
|             "user_membership_attribute", |  | ||||||
|             "object_uniqueness_field", |             "object_uniqueness_field", | ||||||
|             "password_login_update_internal_password", |             "password_login_update_internal_password", | ||||||
|             "sync_users", |             "sync_users", | ||||||
| @ -112,7 +111,6 @@ class LDAPSourceSerializer(SourceSerializer): | |||||||
|             "sync_parent_group", |             "sync_parent_group", | ||||||
|             "connectivity", |             "connectivity", | ||||||
|             "lookup_groups_from_user", |             "lookup_groups_from_user", | ||||||
|             "delete_not_found_objects", |  | ||||||
|         ] |         ] | ||||||
|         extra_kwargs = {"bind_password": {"write_only": True}} |         extra_kwargs = {"bind_password": {"write_only": True}} | ||||||
|  |  | ||||||
| @ -140,7 +138,6 @@ class LDAPSourceViewSet(UsedByMixin, ModelViewSet): | |||||||
|         "user_object_filter", |         "user_object_filter", | ||||||
|         "group_object_filter", |         "group_object_filter", | ||||||
|         "group_membership_field", |         "group_membership_field", | ||||||
|         "user_membership_attribute", |  | ||||||
|         "object_uniqueness_field", |         "object_uniqueness_field", | ||||||
|         "password_login_update_internal_password", |         "password_login_update_internal_password", | ||||||
|         "sync_users", |         "sync_users", | ||||||
| @ -150,7 +147,6 @@ class LDAPSourceViewSet(UsedByMixin, ModelViewSet): | |||||||
|         "user_property_mappings", |         "user_property_mappings", | ||||||
|         "group_property_mappings", |         "group_property_mappings", | ||||||
|         "lookup_groups_from_user", |         "lookup_groups_from_user", | ||||||
|         "delete_not_found_objects", |  | ||||||
|     ] |     ] | ||||||
|     search_fields = ["name", "slug"] |     search_fields = ["name", "slug"] | ||||||
|     ordering = ["name"] |     ordering = ["name"] | ||||||
|  | |||||||
| @ -1,48 +0,0 @@ | |||||||
| # Generated by Django 5.1.9 on 2025-05-28 08:15 |  | ||||||
|  |  | ||||||
| from django.db import migrations, models |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): |  | ||||||
|  |  | ||||||
|     dependencies = [ |  | ||||||
|         ("authentik_core", "0048_delete_oldauthenticatedsession_content_type"), |  | ||||||
|         ("authentik_sources_ldap", "0008_groupldapsourceconnection_userldapsourceconnection"), |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     operations = [ |  | ||||||
|         migrations.AddField( |  | ||||||
|             model_name="groupldapsourceconnection", |  | ||||||
|             name="validated_by", |  | ||||||
|             field=models.UUIDField( |  | ||||||
|                 blank=True, |  | ||||||
|                 help_text="Unique ID used while checking if this object still exists in the directory.", |  | ||||||
|                 null=True, |  | ||||||
|             ), |  | ||||||
|         ), |  | ||||||
|         migrations.AddField( |  | ||||||
|             model_name="ldapsource", |  | ||||||
|             name="delete_not_found_objects", |  | ||||||
|             field=models.BooleanField( |  | ||||||
|                 default=False, |  | ||||||
|                 help_text="Delete authentik users and groups which were previously supplied by this source, but are now missing from it.", |  | ||||||
|             ), |  | ||||||
|         ), |  | ||||||
|         migrations.AddField( |  | ||||||
|             model_name="userldapsourceconnection", |  | ||||||
|             name="validated_by", |  | ||||||
|             field=models.UUIDField( |  | ||||||
|                 blank=True, |  | ||||||
|                 help_text="Unique ID used while checking if this object still exists in the directory.", |  | ||||||
|                 null=True, |  | ||||||
|             ), |  | ||||||
|         ), |  | ||||||
|         migrations.AddIndex( |  | ||||||
|             model_name="groupldapsourceconnection", |  | ||||||
|             index=models.Index(fields=["validated_by"], name="authentik_s_validat_b70447_idx"), |  | ||||||
|         ), |  | ||||||
|         migrations.AddIndex( |  | ||||||
|             model_name="userldapsourceconnection", |  | ||||||
|             index=models.Index(fields=["validated_by"], name="authentik_s_validat_ff2ebc_idx"), |  | ||||||
|         ), |  | ||||||
|     ] |  | ||||||
| @ -1,32 +0,0 @@ | |||||||
| # Generated by Django 5.1.9 on 2025-05-29 11:22 |  | ||||||
|  |  | ||||||
| from django.apps.registry import Apps |  | ||||||
| from django.db import migrations, models |  | ||||||
| from django.db.backends.base.schema import BaseDatabaseSchemaEditor |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def set_user_membership_attribute(apps: Apps, schema_editor: BaseDatabaseSchemaEditor): |  | ||||||
|     LDAPSource = apps.get_model("authentik_sources_ldap", "LDAPSource") |  | ||||||
|     db_alias = schema_editor.connection.alias |  | ||||||
|  |  | ||||||
|     LDAPSource.objects.using(db_alias).filter(group_membership_field="memberUid").all().update( |  | ||||||
|         user_membership_attribute="ldap_uniq" |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): |  | ||||||
|     dependencies = [ |  | ||||||
|         ("authentik_sources_ldap", "0009_groupldapsourceconnection_validated_by_and_more"), |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     operations = [ |  | ||||||
|         migrations.AddField( |  | ||||||
|             model_name="ldapsource", |  | ||||||
|             name="user_membership_attribute", |  | ||||||
|             field=models.TextField( |  | ||||||
|                 default="distinguishedName", |  | ||||||
|                 help_text="Attribute which matches the value of `group_membership_field`.", |  | ||||||
|             ), |  | ||||||
|         ), |  | ||||||
|         migrations.RunPython(set_user_membership_attribute, migrations.RunPython.noop), |  | ||||||
|     ] |  | ||||||
| @ -100,10 +100,6 @@ class LDAPSource(Source): | |||||||
|         default="(objectClass=person)", |         default="(objectClass=person)", | ||||||
|         help_text=_("Consider Objects matching this filter to be Users."), |         help_text=_("Consider Objects matching this filter to be Users."), | ||||||
|     ) |     ) | ||||||
|     user_membership_attribute = models.TextField( |  | ||||||
|         default=LDAP_DISTINGUISHED_NAME, |  | ||||||
|         help_text=_("Attribute which matches the value of `group_membership_field`."), |  | ||||||
|     ) |  | ||||||
|     group_membership_field = models.TextField( |     group_membership_field = models.TextField( | ||||||
|         default="member", help_text=_("Field which contains members of a group.") |         default="member", help_text=_("Field which contains members of a group.") | ||||||
|     ) |     ) | ||||||
| @ -141,14 +137,6 @@ class LDAPSource(Source): | |||||||
|         ), |         ), | ||||||
|     ) |     ) | ||||||
|  |  | ||||||
|     delete_not_found_objects = models.BooleanField( |  | ||||||
|         default=False, |  | ||||||
|         help_text=_( |  | ||||||
|             "Delete authentik users and groups which were previously supplied by this source, " |  | ||||||
|             "but are now missing from it." |  | ||||||
|         ), |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def component(self) -> str: |     def component(self) -> str: | ||||||
|         return "ak-source-ldap-form" |         return "ak-source-ldap-form" | ||||||
| @ -333,12 +321,6 @@ class LDAPSourcePropertyMapping(PropertyMapping): | |||||||
|  |  | ||||||
|  |  | ||||||
| class UserLDAPSourceConnection(UserSourceConnection): | class UserLDAPSourceConnection(UserSourceConnection): | ||||||
|     validated_by = models.UUIDField( |  | ||||||
|         null=True, |  | ||||||
|         blank=True, |  | ||||||
|         help_text=_("Unique ID used while checking if this object still exists in the directory."), |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def serializer(self) -> type[Serializer]: |     def serializer(self) -> type[Serializer]: | ||||||
|         from authentik.sources.ldap.api import ( |         from authentik.sources.ldap.api import ( | ||||||
| @ -350,18 +332,9 @@ class UserLDAPSourceConnection(UserSourceConnection): | |||||||
|     class Meta: |     class Meta: | ||||||
|         verbose_name = _("User LDAP Source Connection") |         verbose_name = _("User LDAP Source Connection") | ||||||
|         verbose_name_plural = _("User LDAP Source Connections") |         verbose_name_plural = _("User LDAP Source Connections") | ||||||
|         indexes = [ |  | ||||||
|             models.Index(fields=["validated_by"]), |  | ||||||
|         ] |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class GroupLDAPSourceConnection(GroupSourceConnection): | class GroupLDAPSourceConnection(GroupSourceConnection): | ||||||
|     validated_by = models.UUIDField( |  | ||||||
|         null=True, |  | ||||||
|         blank=True, |  | ||||||
|         help_text=_("Unique ID used while checking if this object still exists in the directory."), |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def serializer(self) -> type[Serializer]: |     def serializer(self) -> type[Serializer]: | ||||||
|         from authentik.sources.ldap.api import ( |         from authentik.sources.ldap.api import ( | ||||||
| @ -373,6 +346,3 @@ class GroupLDAPSourceConnection(GroupSourceConnection): | |||||||
|     class Meta: |     class Meta: | ||||||
|         verbose_name = _("Group LDAP Source Connection") |         verbose_name = _("Group LDAP Source Connection") | ||||||
|         verbose_name_plural = _("Group LDAP Source Connections") |         verbose_name_plural = _("Group LDAP Source Connections") | ||||||
|         indexes = [ |  | ||||||
|             models.Index(fields=["validated_by"]), |  | ||||||
|         ] |  | ||||||
|  | |||||||
| @ -9,7 +9,7 @@ from structlog.stdlib import BoundLogger, get_logger | |||||||
| from authentik.core.sources.mapper import SourceMapper | from authentik.core.sources.mapper import SourceMapper | ||||||
| from authentik.lib.config import CONFIG | from authentik.lib.config import CONFIG | ||||||
| from authentik.lib.sync.mapper import PropertyMappingManager | from authentik.lib.sync.mapper import PropertyMappingManager | ||||||
| from authentik.sources.ldap.models import LDAPSource, flatten | from authentik.sources.ldap.models import LDAPSource | ||||||
|  |  | ||||||
|  |  | ||||||
| class BaseLDAPSynchronizer: | class BaseLDAPSynchronizer: | ||||||
| @ -77,16 +77,6 @@ class BaseLDAPSynchronizer: | |||||||
|         """Get objects from LDAP, implemented in subclass""" |         """Get objects from LDAP, implemented in subclass""" | ||||||
|         raise NotImplementedError() |         raise NotImplementedError() | ||||||
|  |  | ||||||
|     def get_attributes(self, object): |  | ||||||
|         if "attributes" not in object: |  | ||||||
|             return |  | ||||||
|         return object.get("attributes", {}) |  | ||||||
|  |  | ||||||
|     def get_identifier(self, attributes: dict): |  | ||||||
|         if not attributes.get(self._source.object_uniqueness_field): |  | ||||||
|             return |  | ||||||
|         return flatten(attributes[self._source.object_uniqueness_field]) |  | ||||||
|  |  | ||||||
|     def search_paginator(  # noqa: PLR0913 |     def search_paginator(  # noqa: PLR0913 | ||||||
|         self, |         self, | ||||||
|         search_base, |         search_base, | ||||||
|  | |||||||
| @ -1,61 +0,0 @@ | |||||||
| from collections.abc import Generator |  | ||||||
| from itertools import batched |  | ||||||
| from uuid import uuid4 |  | ||||||
|  |  | ||||||
| from ldap3 import SUBTREE |  | ||||||
|  |  | ||||||
| from authentik.core.models import Group |  | ||||||
| from authentik.sources.ldap.models import GroupLDAPSourceConnection |  | ||||||
| from authentik.sources.ldap.sync.base import BaseLDAPSynchronizer |  | ||||||
| from authentik.sources.ldap.sync.forward_delete_users import DELETE_CHUNK_SIZE, UPDATE_CHUNK_SIZE |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class GroupLDAPForwardDeletion(BaseLDAPSynchronizer): |  | ||||||
|     """Delete LDAP Groups from authentik""" |  | ||||||
|  |  | ||||||
|     @staticmethod |  | ||||||
|     def name() -> str: |  | ||||||
|         return "group_deletions" |  | ||||||
|  |  | ||||||
|     def get_objects(self, **kwargs) -> Generator: |  | ||||||
|         if not self._source.sync_groups or not self._source.delete_not_found_objects: |  | ||||||
|             self.message("Group syncing is disabled for this Source") |  | ||||||
|             return iter(()) |  | ||||||
|  |  | ||||||
|         uuid = uuid4() |  | ||||||
|         groups = self._source.connection().extend.standard.paged_search( |  | ||||||
|             search_base=self.base_dn_groups, |  | ||||||
|             search_filter=self._source.group_object_filter, |  | ||||||
|             search_scope=SUBTREE, |  | ||||||
|             attributes=[self._source.object_uniqueness_field], |  | ||||||
|             generator=True, |  | ||||||
|             **kwargs, |  | ||||||
|         ) |  | ||||||
|         for batch in batched(groups, UPDATE_CHUNK_SIZE, strict=False): |  | ||||||
|             identifiers = [] |  | ||||||
|             for group in batch: |  | ||||||
|                 if not (attributes := self.get_attributes(group)): |  | ||||||
|                     continue |  | ||||||
|                 if identifier := self.get_identifier(attributes): |  | ||||||
|                     identifiers.append(identifier) |  | ||||||
|             GroupLDAPSourceConnection.objects.filter(identifier__in=identifiers).update( |  | ||||||
|                 validated_by=uuid |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|         return batched( |  | ||||||
|             GroupLDAPSourceConnection.objects.filter(source=self._source) |  | ||||||
|             .exclude(validated_by=uuid) |  | ||||||
|             .values_list("group", flat=True) |  | ||||||
|             .iterator(chunk_size=DELETE_CHUNK_SIZE), |  | ||||||
|             DELETE_CHUNK_SIZE, |  | ||||||
|             strict=False, |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     def sync(self, group_pks: tuple) -> int: |  | ||||||
|         """Delete authentik groups""" |  | ||||||
|         if not self._source.sync_groups or not self._source.delete_not_found_objects: |  | ||||||
|             self.message("Group syncing is disabled for this Source") |  | ||||||
|             return -1 |  | ||||||
|         self._logger.debug("Deleting groups", group_pks=group_pks) |  | ||||||
|         _, deleted_per_type = Group.objects.filter(pk__in=group_pks).delete() |  | ||||||
|         return deleted_per_type.get(Group._meta.label, 0) |  | ||||||
| @ -1,63 +0,0 @@ | |||||||
| from collections.abc import Generator |  | ||||||
| from itertools import batched |  | ||||||
| from uuid import uuid4 |  | ||||||
|  |  | ||||||
| from ldap3 import SUBTREE |  | ||||||
|  |  | ||||||
| from authentik.core.models import User |  | ||||||
| from authentik.sources.ldap.models import UserLDAPSourceConnection |  | ||||||
| from authentik.sources.ldap.sync.base import BaseLDAPSynchronizer |  | ||||||
|  |  | ||||||
| UPDATE_CHUNK_SIZE = 10_000 |  | ||||||
| DELETE_CHUNK_SIZE = 50 |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class UserLDAPForwardDeletion(BaseLDAPSynchronizer): |  | ||||||
|     """Delete LDAP Users from authentik""" |  | ||||||
|  |  | ||||||
|     @staticmethod |  | ||||||
|     def name() -> str: |  | ||||||
|         return "user_deletions" |  | ||||||
|  |  | ||||||
|     def get_objects(self, **kwargs) -> Generator: |  | ||||||
|         if not self._source.sync_users or not self._source.delete_not_found_objects: |  | ||||||
|             self.message("User syncing is disabled for this Source") |  | ||||||
|             return iter(()) |  | ||||||
|  |  | ||||||
|         uuid = uuid4() |  | ||||||
|         users = self._source.connection().extend.standard.paged_search( |  | ||||||
|             search_base=self.base_dn_users, |  | ||||||
|             search_filter=self._source.user_object_filter, |  | ||||||
|             search_scope=SUBTREE, |  | ||||||
|             attributes=[self._source.object_uniqueness_field], |  | ||||||
|             generator=True, |  | ||||||
|             **kwargs, |  | ||||||
|         ) |  | ||||||
|         for batch in batched(users, UPDATE_CHUNK_SIZE, strict=False): |  | ||||||
|             identifiers = [] |  | ||||||
|             for user in batch: |  | ||||||
|                 if not (attributes := self.get_attributes(user)): |  | ||||||
|                     continue |  | ||||||
|                 if identifier := self.get_identifier(attributes): |  | ||||||
|                     identifiers.append(identifier) |  | ||||||
|             UserLDAPSourceConnection.objects.filter(identifier__in=identifiers).update( |  | ||||||
|                 validated_by=uuid |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|         return batched( |  | ||||||
|             UserLDAPSourceConnection.objects.filter(source=self._source) |  | ||||||
|             .exclude(validated_by=uuid) |  | ||||||
|             .values_list("user", flat=True) |  | ||||||
|             .iterator(chunk_size=DELETE_CHUNK_SIZE), |  | ||||||
|             DELETE_CHUNK_SIZE, |  | ||||||
|             strict=False, |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     def sync(self, user_pks: tuple) -> int: |  | ||||||
|         """Delete authentik users""" |  | ||||||
|         if not self._source.sync_users or not self._source.delete_not_found_objects: |  | ||||||
|             self.message("User syncing is disabled for this Source") |  | ||||||
|             return -1 |  | ||||||
|         self._logger.debug("Deleting users", user_pks=user_pks) |  | ||||||
|         _, deleted_per_type = User.objects.filter(pk__in=user_pks).delete() |  | ||||||
|         return deleted_per_type.get(User._meta.label, 0) |  | ||||||
| @ -58,16 +58,18 @@ class GroupLDAPSynchronizer(BaseLDAPSynchronizer): | |||||||
|             return -1 |             return -1 | ||||||
|         group_count = 0 |         group_count = 0 | ||||||
|         for group in page_data: |         for group in page_data: | ||||||
|             if (attributes := self.get_attributes(group)) is None: |             if "attributes" not in group: | ||||||
|                 continue |                 continue | ||||||
|  |             attributes = group.get("attributes", {}) | ||||||
|             group_dn = flatten(flatten(group.get("entryDN", group.get("dn")))) |             group_dn = flatten(flatten(group.get("entryDN", group.get("dn")))) | ||||||
|             if not (uniq := self.get_identifier(attributes)): |             if not attributes.get(self._source.object_uniqueness_field): | ||||||
|                 self.message( |                 self.message( | ||||||
|                     f"Uniqueness field not found/not set in attributes: '{group_dn}'", |                     f"Uniqueness field not found/not set in attributes: '{group_dn}'", | ||||||
|                     attributes=attributes.keys(), |                     attributes=attributes.keys(), | ||||||
|                     dn=group_dn, |                     dn=group_dn, | ||||||
|                 ) |                 ) | ||||||
|                 continue |                 continue | ||||||
|  |             uniq = flatten(attributes[self._source.object_uniqueness_field]) | ||||||
|             try: |             try: | ||||||
|                 defaults = { |                 defaults = { | ||||||
|                     k: flatten(v) |                     k: flatten(v) | ||||||
|  | |||||||
| @ -63,19 +63,25 @@ class MembershipLDAPSynchronizer(BaseLDAPSynchronizer): | |||||||
|                     group_member_dn = group_member.get("dn", {}) |                     group_member_dn = group_member.get("dn", {}) | ||||||
|                     members.append(group_member_dn) |                     members.append(group_member_dn) | ||||||
|             else: |             else: | ||||||
|                 if (attributes := self.get_attributes(group)) is None: |                 if "attributes" not in group: | ||||||
|                     continue |                     continue | ||||||
|                 members = attributes.get(self._source.group_membership_field, []) |                 members = group.get("attributes", {}).get(self._source.group_membership_field, []) | ||||||
|  |  | ||||||
|             ak_group = self.get_group(group) |             ak_group = self.get_group(group) | ||||||
|             if not ak_group: |             if not ak_group: | ||||||
|                 continue |                 continue | ||||||
|  |  | ||||||
|  |             membership_mapping_attribute = LDAP_DISTINGUISHED_NAME | ||||||
|  |             if self._source.group_membership_field == "memberUid": | ||||||
|  |                 # If memberships are based on the posixGroup's 'memberUid' | ||||||
|  |                 # attribute we use the RDN instead of the FDN to lookup members. | ||||||
|  |                 membership_mapping_attribute = LDAP_UNIQUENESS | ||||||
|  |  | ||||||
|             users = User.objects.filter( |             users = User.objects.filter( | ||||||
|                 Q(**{f"attributes__{self._source.user_membership_attribute}__in": members}) |                 Q(**{f"attributes__{membership_mapping_attribute}__in": members}) | ||||||
|                 | Q( |                 | Q( | ||||||
|                     **{ |                     **{ | ||||||
|                         f"attributes__{self._source.user_membership_attribute}__isnull": True, |                         f"attributes__{membership_mapping_attribute}__isnull": True, | ||||||
|                         "ak_groups__in": [ak_group], |                         "ak_groups__in": [ak_group], | ||||||
|                     } |                     } | ||||||
|                 ) |                 ) | ||||||
|  | |||||||
| @ -60,16 +60,18 @@ class UserLDAPSynchronizer(BaseLDAPSynchronizer): | |||||||
|             return -1 |             return -1 | ||||||
|         user_count = 0 |         user_count = 0 | ||||||
|         for user in page_data: |         for user in page_data: | ||||||
|             if (attributes := self.get_attributes(user)) is None: |             if "attributes" not in user: | ||||||
|                 continue |                 continue | ||||||
|  |             attributes = user.get("attributes", {}) | ||||||
|             user_dn = flatten(user.get("entryDN", user.get("dn"))) |             user_dn = flatten(user.get("entryDN", user.get("dn"))) | ||||||
|             if not (uniq := self.get_identifier(attributes)): |             if not attributes.get(self._source.object_uniqueness_field): | ||||||
|                 self.message( |                 self.message( | ||||||
|                     f"Uniqueness field not found/not set in attributes: '{user_dn}'", |                     f"Uniqueness field not found/not set in attributes: '{user_dn}'", | ||||||
|                     attributes=attributes.keys(), |                     attributes=attributes.keys(), | ||||||
|                     dn=user_dn, |                     dn=user_dn, | ||||||
|                 ) |                 ) | ||||||
|                 continue |                 continue | ||||||
|  |             uniq = flatten(attributes[self._source.object_uniqueness_field]) | ||||||
|             try: |             try: | ||||||
|                 defaults = { |                 defaults = { | ||||||
|                     k: flatten(v) |                     k: flatten(v) | ||||||
|  | |||||||
| @ -17,8 +17,6 @@ from authentik.lib.utils.reflection import class_to_path, path_to_class | |||||||
| from authentik.root.celery import CELERY_APP | from authentik.root.celery import CELERY_APP | ||||||
| from authentik.sources.ldap.models import LDAPSource | from authentik.sources.ldap.models import LDAPSource | ||||||
| from authentik.sources.ldap.sync.base import BaseLDAPSynchronizer | from authentik.sources.ldap.sync.base import BaseLDAPSynchronizer | ||||||
| from authentik.sources.ldap.sync.forward_delete_groups import GroupLDAPForwardDeletion |  | ||||||
| from authentik.sources.ldap.sync.forward_delete_users import UserLDAPForwardDeletion |  | ||||||
| from authentik.sources.ldap.sync.groups import GroupLDAPSynchronizer | from authentik.sources.ldap.sync.groups import GroupLDAPSynchronizer | ||||||
| from authentik.sources.ldap.sync.membership import MembershipLDAPSynchronizer | from authentik.sources.ldap.sync.membership import MembershipLDAPSynchronizer | ||||||
| from authentik.sources.ldap.sync.users import UserLDAPSynchronizer | from authentik.sources.ldap.sync.users import UserLDAPSynchronizer | ||||||
| @ -54,11 +52,11 @@ def ldap_connectivity_check(pk: str | None = None): | |||||||
|  |  | ||||||
|  |  | ||||||
| @CELERY_APP.task( | @CELERY_APP.task( | ||||||
|     # We take the configured hours timeout time by 3.5 as we run user and |     # We take the configured hours timeout time by 2.5 as we run user and | ||||||
|     # group in parallel and then membership, then deletions, so 3x is to cover the serial tasks, |     # group in parallel and then membership, so 2x is to cover the serial tasks, | ||||||
|     # and 0.5x on top of that to give some more leeway |     # and 0.5x on top of that to give some more leeway | ||||||
|     soft_time_limit=(60 * 60 * CONFIG.get_int("ldap.task_timeout_hours")) * 3.5, |     soft_time_limit=(60 * 60 * CONFIG.get_int("ldap.task_timeout_hours")) * 2.5, | ||||||
|     task_time_limit=(60 * 60 * CONFIG.get_int("ldap.task_timeout_hours")) * 3.5, |     task_time_limit=(60 * 60 * CONFIG.get_int("ldap.task_timeout_hours")) * 2.5, | ||||||
| ) | ) | ||||||
| def ldap_sync_single(source_pk: str): | def ldap_sync_single(source_pk: str): | ||||||
|     """Sync a single source""" |     """Sync a single source""" | ||||||
| @ -81,25 +79,6 @@ def ldap_sync_single(source_pk: str): | |||||||
|             group( |             group( | ||||||
|                 ldap_sync_paginator(source, MembershipLDAPSynchronizer), |                 ldap_sync_paginator(source, MembershipLDAPSynchronizer), | ||||||
|             ), |             ), | ||||||
|             # Finally, deletions. What we'd really like to do here is something like |  | ||||||
|             # ``` |  | ||||||
|             # user_identifiers = <ldap query> |  | ||||||
|             # User.objects.exclude( |  | ||||||
|             #     usersourceconnection__identifier__in=user_uniqueness_identifiers, |  | ||||||
|             # ).delete() |  | ||||||
|             # ``` |  | ||||||
|             # This runs into performance issues in large installations. So instead we spread the |  | ||||||
|             # work out into three steps: |  | ||||||
|             # 1. Get every object from the LDAP source. |  | ||||||
|             # 2. Mark every object as "safe" in the database. This is quick, but any error could |  | ||||||
|             #    mean deleting users which should not be deleted, so we do it immediately, in |  | ||||||
|             #    large chunks, and only queue the deletion step afterwards. |  | ||||||
|             # 3. Delete every unmarked item. This is slow, so we spread it over many tasks in |  | ||||||
|             #    small chunks. |  | ||||||
|             group( |  | ||||||
|                 ldap_sync_paginator(source, UserLDAPForwardDeletion) |  | ||||||
|                 + ldap_sync_paginator(source, GroupLDAPForwardDeletion), |  | ||||||
|             ), |  | ||||||
|         ) |         ) | ||||||
|         task() |         task() | ||||||
|  |  | ||||||
|  | |||||||
| @ -2,33 +2,6 @@ | |||||||
|  |  | ||||||
| from ldap3 import MOCK_SYNC, OFFLINE_SLAPD_2_4, Connection, Server | from ldap3 import MOCK_SYNC, OFFLINE_SLAPD_2_4, Connection, Server | ||||||
|  |  | ||||||
| # The mock modifies these in place, so we have to define them per string |  | ||||||
| user_in_slapd_dn = "cn=user_in_slapd_cn,ou=users,dc=goauthentik,dc=io" |  | ||||||
| user_in_slapd_cn = "user_in_slapd_cn" |  | ||||||
| user_in_slapd_uid = "user_in_slapd_uid" |  | ||||||
| user_in_slapd_object_class = "person" |  | ||||||
| user_in_slapd = { |  | ||||||
|     "dn": user_in_slapd_dn, |  | ||||||
|     "attributes": { |  | ||||||
|         "cn": user_in_slapd_cn, |  | ||||||
|         "uid": user_in_slapd_uid, |  | ||||||
|         "objectClass": user_in_slapd_object_class, |  | ||||||
|     }, |  | ||||||
| } |  | ||||||
| group_in_slapd_dn = "cn=user_in_slapd_cn,ou=groups,dc=goauthentik,dc=io" |  | ||||||
| group_in_slapd_cn = "group_in_slapd_cn" |  | ||||||
| group_in_slapd_uid = "group_in_slapd_uid" |  | ||||||
| group_in_slapd_object_class = "groupOfNames" |  | ||||||
| group_in_slapd = { |  | ||||||
|     "dn": group_in_slapd_dn, |  | ||||||
|     "attributes": { |  | ||||||
|         "cn": group_in_slapd_cn, |  | ||||||
|         "uid": group_in_slapd_uid, |  | ||||||
|         "objectClass": group_in_slapd_object_class, |  | ||||||
|         "member": [user_in_slapd["dn"]], |  | ||||||
|     }, |  | ||||||
| } |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def mock_slapd_connection(password: str) -> Connection: | def mock_slapd_connection(password: str) -> Connection: | ||||||
|     """Create mock SLAPD connection""" |     """Create mock SLAPD connection""" | ||||||
| @ -123,14 +96,5 @@ def mock_slapd_connection(password: str) -> Connection: | |||||||
|             "objectClass": "posixAccount", |             "objectClass": "posixAccount", | ||||||
|         }, |         }, | ||||||
|     ) |     ) | ||||||
|     # Known user and group |  | ||||||
|     connection.strategy.add_entry( |  | ||||||
|         user_in_slapd["dn"], |  | ||||||
|         user_in_slapd["attributes"], |  | ||||||
|     ) |  | ||||||
|     connection.strategy.add_entry( |  | ||||||
|         group_in_slapd["dn"], |  | ||||||
|         group_in_slapd["attributes"], |  | ||||||
|     ) |  | ||||||
|     connection.bind() |     connection.bind() | ||||||
|     return connection |     return connection | ||||||
|  | |||||||
| @ -13,26 +13,14 @@ from authentik.events.system_tasks import TaskStatus | |||||||
| from authentik.lib.generators import generate_id, generate_key | from authentik.lib.generators import generate_id, generate_key | ||||||
| from authentik.lib.sync.outgoing.exceptions import StopSync | from authentik.lib.sync.outgoing.exceptions import StopSync | ||||||
| from authentik.lib.utils.reflection import class_to_path | from authentik.lib.utils.reflection import class_to_path | ||||||
| from authentik.sources.ldap.models import ( | from authentik.sources.ldap.models import LDAPSource, LDAPSourcePropertyMapping | ||||||
|     GroupLDAPSourceConnection, |  | ||||||
|     LDAPSource, |  | ||||||
|     LDAPSourcePropertyMapping, |  | ||||||
|     UserLDAPSourceConnection, |  | ||||||
| ) |  | ||||||
| from authentik.sources.ldap.sync.forward_delete_users import DELETE_CHUNK_SIZE |  | ||||||
| from authentik.sources.ldap.sync.groups import GroupLDAPSynchronizer | from authentik.sources.ldap.sync.groups import GroupLDAPSynchronizer | ||||||
| from authentik.sources.ldap.sync.membership import MembershipLDAPSynchronizer | from authentik.sources.ldap.sync.membership import MembershipLDAPSynchronizer | ||||||
| from authentik.sources.ldap.sync.users import UserLDAPSynchronizer | from authentik.sources.ldap.sync.users import UserLDAPSynchronizer | ||||||
| from authentik.sources.ldap.tasks import ldap_sync, ldap_sync_all | from authentik.sources.ldap.tasks import ldap_sync, ldap_sync_all | ||||||
| from authentik.sources.ldap.tests.mock_ad import mock_ad_connection | from authentik.sources.ldap.tests.mock_ad import mock_ad_connection | ||||||
| from authentik.sources.ldap.tests.mock_freeipa import mock_freeipa_connection | from authentik.sources.ldap.tests.mock_freeipa import mock_freeipa_connection | ||||||
| from authentik.sources.ldap.tests.mock_slapd import ( | from authentik.sources.ldap.tests.mock_slapd import mock_slapd_connection | ||||||
|     group_in_slapd_cn, |  | ||||||
|     group_in_slapd_uid, |  | ||||||
|     mock_slapd_connection, |  | ||||||
|     user_in_slapd_cn, |  | ||||||
|     user_in_slapd_uid, |  | ||||||
| ) |  | ||||||
|  |  | ||||||
| LDAP_PASSWORD = generate_key() | LDAP_PASSWORD = generate_key() | ||||||
|  |  | ||||||
| @ -269,56 +257,12 @@ class LDAPSyncTests(TestCase): | |||||||
|         self.source.group_membership_field = "memberUid" |         self.source.group_membership_field = "memberUid" | ||||||
|         self.source.user_object_filter = "(objectClass=posixAccount)" |         self.source.user_object_filter = "(objectClass=posixAccount)" | ||||||
|         self.source.group_object_filter = "(objectClass=posixGroup)" |         self.source.group_object_filter = "(objectClass=posixGroup)" | ||||||
|         self.source.user_membership_attribute = "uid" |  | ||||||
|         self.source.user_property_mappings.set( |         self.source.user_property_mappings.set( | ||||||
|             [ |  | ||||||
|                 *LDAPSourcePropertyMapping.objects.filter( |  | ||||||
|                     Q(managed__startswith="goauthentik.io/sources/ldap/default") |  | ||||||
|                     | Q(managed__startswith="goauthentik.io/sources/ldap/openldap") |  | ||||||
|                 ).all(), |  | ||||||
|                 LDAPSourcePropertyMapping.objects.create( |  | ||||||
|                     name="name", |  | ||||||
|                     expression='return {"attributes": {"uid": list_flatten(ldap.get("uid"))}}', |  | ||||||
|                 ), |  | ||||||
|             ] |  | ||||||
|         ) |  | ||||||
|         self.source.group_property_mappings.set( |  | ||||||
|             LDAPSourcePropertyMapping.objects.filter( |             LDAPSourcePropertyMapping.objects.filter( | ||||||
|                 managed="goauthentik.io/sources/ldap/openldap-cn" |                 Q(managed__startswith="goauthentik.io/sources/ldap/default") | ||||||
|  |                 | Q(managed__startswith="goauthentik.io/sources/ldap/openldap") | ||||||
|             ) |             ) | ||||||
|         ) |         ) | ||||||
|         connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD)) |  | ||||||
|         with patch("authentik.sources.ldap.models.LDAPSource.connection", connection): |  | ||||||
|             self.source.save() |  | ||||||
|             user_sync = UserLDAPSynchronizer(self.source) |  | ||||||
|             user_sync.sync_full() |  | ||||||
|             group_sync = GroupLDAPSynchronizer(self.source) |  | ||||||
|             group_sync.sync_full() |  | ||||||
|             membership_sync = MembershipLDAPSynchronizer(self.source) |  | ||||||
|             membership_sync.sync_full() |  | ||||||
|             # Test if membership mapping based on memberUid works. |  | ||||||
|             posix_group = Group.objects.filter(name="group-posix").first() |  | ||||||
|             self.assertTrue(posix_group.users.filter(name="user-posix").exists()) |  | ||||||
|  |  | ||||||
|     def test_sync_groups_openldap_posix_group_nonstandard_membership_attribute(self): |  | ||||||
|         """Test posix group sync""" |  | ||||||
|         self.source.object_uniqueness_field = "cn" |  | ||||||
|         self.source.group_membership_field = "memberUid" |  | ||||||
|         self.source.user_object_filter = "(objectClass=posixAccount)" |  | ||||||
|         self.source.group_object_filter = "(objectClass=posixGroup)" |  | ||||||
|         self.source.user_membership_attribute = "cn" |  | ||||||
|         self.source.user_property_mappings.set( |  | ||||||
|             [ |  | ||||||
|                 *LDAPSourcePropertyMapping.objects.filter( |  | ||||||
|                     Q(managed__startswith="goauthentik.io/sources/ldap/default") |  | ||||||
|                     | Q(managed__startswith="goauthentik.io/sources/ldap/openldap") |  | ||||||
|                 ).all(), |  | ||||||
|                 LDAPSourcePropertyMapping.objects.create( |  | ||||||
|                     name="name", |  | ||||||
|                     expression='return {"attributes": {"cn": list_flatten(ldap.get("cn"))}}', |  | ||||||
|                 ), |  | ||||||
|             ] |  | ||||||
|         ) |  | ||||||
|         self.source.group_property_mappings.set( |         self.source.group_property_mappings.set( | ||||||
|             LDAPSourcePropertyMapping.objects.filter( |             LDAPSourcePropertyMapping.objects.filter( | ||||||
|                 managed="goauthentik.io/sources/ldap/openldap-cn" |                 managed="goauthentik.io/sources/ldap/openldap-cn" | ||||||
| @ -364,160 +308,3 @@ class LDAPSyncTests(TestCase): | |||||||
|         connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD)) |         connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD)) | ||||||
|         with patch("authentik.sources.ldap.models.LDAPSource.connection", connection): |         with patch("authentik.sources.ldap.models.LDAPSource.connection", connection): | ||||||
|             ldap_sync_all.delay().get() |             ldap_sync_all.delay().get() | ||||||
|  |  | ||||||
|     def test_user_deletion(self): |  | ||||||
|         """Test user deletion""" |  | ||||||
|         user = User.objects.create_user(username="not-in-the-source") |  | ||||||
|         UserLDAPSourceConnection.objects.create( |  | ||||||
|             user=user, source=self.source, identifier="not-in-the-source" |  | ||||||
|         ) |  | ||||||
|         self.source.object_uniqueness_field = "uid" |  | ||||||
|         self.source.group_object_filter = "(objectClass=groupOfNames)" |  | ||||||
|         self.source.delete_not_found_objects = True |  | ||||||
|         self.source.save() |  | ||||||
|  |  | ||||||
|         connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD)) |  | ||||||
|         with patch("authentik.sources.ldap.models.LDAPSource.connection", connection): |  | ||||||
|             ldap_sync_all.delay().get() |  | ||||||
|         self.assertFalse(User.objects.filter(username="not-in-the-source").exists()) |  | ||||||
|  |  | ||||||
|     def test_user_deletion_still_in_source(self): |  | ||||||
|         """Test that user is not deleted if it's still in the source""" |  | ||||||
|         username = user_in_slapd_cn |  | ||||||
|         identifier = user_in_slapd_uid |  | ||||||
|         user = User.objects.create_user(username=username) |  | ||||||
|         UserLDAPSourceConnection.objects.create( |  | ||||||
|             user=user, source=self.source, identifier=identifier |  | ||||||
|         ) |  | ||||||
|         self.source.object_uniqueness_field = "uid" |  | ||||||
|         self.source.group_object_filter = "(objectClass=groupOfNames)" |  | ||||||
|         self.source.delete_not_found_objects = True |  | ||||||
|         self.source.save() |  | ||||||
|  |  | ||||||
|         connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD)) |  | ||||||
|         with patch("authentik.sources.ldap.models.LDAPSource.connection", connection): |  | ||||||
|             ldap_sync_all.delay().get() |  | ||||||
|         self.assertTrue(User.objects.filter(username=username).exists()) |  | ||||||
|  |  | ||||||
|     def test_user_deletion_no_sync(self): |  | ||||||
|         """Test that user is not deleted if sync_users is False""" |  | ||||||
|         user = User.objects.create_user(username="not-in-the-source") |  | ||||||
|         UserLDAPSourceConnection.objects.create( |  | ||||||
|             user=user, source=self.source, identifier="not-in-the-source" |  | ||||||
|         ) |  | ||||||
|         self.source.object_uniqueness_field = "uid" |  | ||||||
|         self.source.group_object_filter = "(objectClass=groupOfNames)" |  | ||||||
|         self.source.delete_not_found_objects = True |  | ||||||
|         self.source.sync_users = False |  | ||||||
|         self.source.save() |  | ||||||
|  |  | ||||||
|         connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD)) |  | ||||||
|         with patch("authentik.sources.ldap.models.LDAPSource.connection", connection): |  | ||||||
|             ldap_sync_all.delay().get() |  | ||||||
|         self.assertTrue(User.objects.filter(username="not-in-the-source").exists()) |  | ||||||
|  |  | ||||||
|     def test_user_deletion_no_delete(self): |  | ||||||
|         """Test that user is not deleted if delete_not_found_objects is False""" |  | ||||||
|         user = User.objects.create_user(username="not-in-the-source") |  | ||||||
|         UserLDAPSourceConnection.objects.create( |  | ||||||
|             user=user, source=self.source, identifier="not-in-the-source" |  | ||||||
|         ) |  | ||||||
|         self.source.object_uniqueness_field = "uid" |  | ||||||
|         self.source.group_object_filter = "(objectClass=groupOfNames)" |  | ||||||
|         self.source.save() |  | ||||||
|  |  | ||||||
|         connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD)) |  | ||||||
|         with patch("authentik.sources.ldap.models.LDAPSource.connection", connection): |  | ||||||
|             ldap_sync_all.delay().get() |  | ||||||
|         self.assertTrue(User.objects.filter(username="not-in-the-source").exists()) |  | ||||||
|  |  | ||||||
|     def test_group_deletion(self): |  | ||||||
|         """Test group deletion""" |  | ||||||
|         group = Group.objects.create(name="not-in-the-source") |  | ||||||
|         GroupLDAPSourceConnection.objects.create( |  | ||||||
|             group=group, source=self.source, identifier="not-in-the-source" |  | ||||||
|         ) |  | ||||||
|         self.source.object_uniqueness_field = "uid" |  | ||||||
|         self.source.group_object_filter = "(objectClass=groupOfNames)" |  | ||||||
|         self.source.delete_not_found_objects = True |  | ||||||
|         self.source.save() |  | ||||||
|  |  | ||||||
|         connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD)) |  | ||||||
|         with patch("authentik.sources.ldap.models.LDAPSource.connection", connection): |  | ||||||
|             ldap_sync_all.delay().get() |  | ||||||
|         self.assertFalse(Group.objects.filter(name="not-in-the-source").exists()) |  | ||||||
|  |  | ||||||
|     def test_group_deletion_still_in_source(self): |  | ||||||
|         """Test that group is not deleted if it's still in the source""" |  | ||||||
|         groupname = group_in_slapd_cn |  | ||||||
|         identifier = group_in_slapd_uid |  | ||||||
|         group = Group.objects.create(name=groupname) |  | ||||||
|         GroupLDAPSourceConnection.objects.create( |  | ||||||
|             group=group, source=self.source, identifier=identifier |  | ||||||
|         ) |  | ||||||
|         self.source.object_uniqueness_field = "uid" |  | ||||||
|         self.source.group_object_filter = "(objectClass=groupOfNames)" |  | ||||||
|         self.source.delete_not_found_objects = True |  | ||||||
|         self.source.save() |  | ||||||
|  |  | ||||||
|         connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD)) |  | ||||||
|         with patch("authentik.sources.ldap.models.LDAPSource.connection", connection): |  | ||||||
|             ldap_sync_all.delay().get() |  | ||||||
|         self.assertTrue(Group.objects.filter(name=groupname).exists()) |  | ||||||
|  |  | ||||||
|     def test_group_deletion_no_sync(self): |  | ||||||
|         """Test that group is not deleted if sync_groups is False""" |  | ||||||
|         group = Group.objects.create(name="not-in-the-source") |  | ||||||
|         GroupLDAPSourceConnection.objects.create( |  | ||||||
|             group=group, source=self.source, identifier="not-in-the-source" |  | ||||||
|         ) |  | ||||||
|         self.source.object_uniqueness_field = "uid" |  | ||||||
|         self.source.group_object_filter = "(objectClass=groupOfNames)" |  | ||||||
|         self.source.delete_not_found_objects = True |  | ||||||
|         self.source.sync_groups = False |  | ||||||
|         self.source.save() |  | ||||||
|  |  | ||||||
|         connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD)) |  | ||||||
|         with patch("authentik.sources.ldap.models.LDAPSource.connection", connection): |  | ||||||
|             ldap_sync_all.delay().get() |  | ||||||
|         self.assertTrue(Group.objects.filter(name="not-in-the-source").exists()) |  | ||||||
|  |  | ||||||
|     def test_group_deletion_no_delete(self): |  | ||||||
|         """Test that group is not deleted if delete_not_found_objects is False""" |  | ||||||
|         group = Group.objects.create(name="not-in-the-source") |  | ||||||
|         GroupLDAPSourceConnection.objects.create( |  | ||||||
|             group=group, source=self.source, identifier="not-in-the-source" |  | ||||||
|         ) |  | ||||||
|         self.source.object_uniqueness_field = "uid" |  | ||||||
|         self.source.group_object_filter = "(objectClass=groupOfNames)" |  | ||||||
|         self.source.save() |  | ||||||
|  |  | ||||||
|         connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD)) |  | ||||||
|         with patch("authentik.sources.ldap.models.LDAPSource.connection", connection): |  | ||||||
|             ldap_sync_all.delay().get() |  | ||||||
|         self.assertTrue(Group.objects.filter(name="not-in-the-source").exists()) |  | ||||||
|  |  | ||||||
|     def test_batch_deletion(self): |  | ||||||
|         """Test batch deletion""" |  | ||||||
|         BATCH_SIZE = DELETE_CHUNK_SIZE + 1 |  | ||||||
|         for i in range(BATCH_SIZE): |  | ||||||
|             user = User.objects.create_user(username=f"not-in-the-source-{i}") |  | ||||||
|             group = Group.objects.create(name=f"not-in-the-source-{i}") |  | ||||||
|             group.users.add(user) |  | ||||||
|             UserLDAPSourceConnection.objects.create( |  | ||||||
|                 user=user, source=self.source, identifier=f"not-in-the-source-{i}-user" |  | ||||||
|             ) |  | ||||||
|             GroupLDAPSourceConnection.objects.create( |  | ||||||
|                 group=group, source=self.source, identifier=f"not-in-the-source-{i}-group" |  | ||||||
|             ) |  | ||||||
|         self.source.object_uniqueness_field = "uid" |  | ||||||
|         self.source.group_object_filter = "(objectClass=groupOfNames)" |  | ||||||
|         self.source.delete_not_found_objects = True |  | ||||||
|         self.source.save() |  | ||||||
|  |  | ||||||
|         connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD)) |  | ||||||
|         with patch("authentik.sources.ldap.models.LDAPSource.connection", connection): |  | ||||||
|             ldap_sync_all.delay().get() |  | ||||||
|  |  | ||||||
|         self.assertFalse(User.objects.filter(username__startswith="not-in-the-source").exists()) |  | ||||||
|         self.assertFalse(Group.objects.filter(name__startswith="not-in-the-source").exists()) |  | ||||||
|  | |||||||
| @ -9,7 +9,6 @@ from django.http.response import HttpResponseBadRequest | |||||||
| from django.shortcuts import get_object_or_404, redirect | from django.shortcuts import get_object_or_404, redirect | ||||||
| from django.utils.decorators import method_decorator | from django.utils.decorators import method_decorator | ||||||
| from django.utils.http import urlencode | from django.utils.http import urlencode | ||||||
| from django.utils.translation import gettext as _ |  | ||||||
| from django.views import View | from django.views import View | ||||||
| from django.views.decorators.csrf import csrf_exempt | from django.views.decorators.csrf import csrf_exempt | ||||||
| from structlog.stdlib import get_logger | from structlog.stdlib import get_logger | ||||||
| @ -129,9 +128,7 @@ class InitiateView(View): | |||||||
|         # otherwise we default to POST_AUTO, with direct redirect |         # otherwise we default to POST_AUTO, with direct redirect | ||||||
|         if source.binding_type == SAMLBindingTypes.POST: |         if source.binding_type == SAMLBindingTypes.POST: | ||||||
|             injected_stages.append(in_memory_stage(ConsentStageView)) |             injected_stages.append(in_memory_stage(ConsentStageView)) | ||||||
|             plan_kwargs[PLAN_CONTEXT_CONSENT_HEADER] = _( |             plan_kwargs[PLAN_CONTEXT_CONSENT_HEADER] = f"Continue to {source.name}" | ||||||
|                 "Continue to {source_name}".format(source_name=source.name) |  | ||||||
|             ) |  | ||||||
|         injected_stages.append(in_memory_stage(AutosubmitStageView)) |         injected_stages.append(in_memory_stage(AutosubmitStageView)) | ||||||
|         return self.handle_login_flow( |         return self.handle_login_flow( | ||||||
|             source, |             source, | ||||||
|  | |||||||
| @ -97,8 +97,7 @@ class GroupsView(SCIMObjectView): | |||||||
|                     self.logger.warning("Invalid group member", exc=exc) |                     self.logger.warning("Invalid group member", exc=exc) | ||||||
|                     continue |                     continue | ||||||
|                 query |= Q(uuid=member.value) |                 query |= Q(uuid=member.value) | ||||||
|             if query: |             group.users.set(User.objects.filter(query)) | ||||||
|                 group.users.set(User.objects.filter(query)) |  | ||||||
|         if not connection: |         if not connection: | ||||||
|             connection, _ = SCIMSourceGroup.objects.get_or_create( |             connection, _ = SCIMSourceGroup.objects.get_or_create( | ||||||
|                 source=self.source, |                 source=self.source, | ||||||
|  | |||||||
| @ -151,7 +151,9 @@ class AuthenticatorValidateStageWebAuthnTests(FlowTestCase): | |||||||
|             webauthn_user_verification=UserVerification.PREFERRED, |             webauthn_user_verification=UserVerification.PREFERRED, | ||||||
|         ) |         ) | ||||||
|         stage.webauthn_allowed_device_types.set( |         stage.webauthn_allowed_device_types.set( | ||||||
|             WebAuthnDeviceType.objects.filter(description="YubiKey 5 Series") |             WebAuthnDeviceType.objects.filter( | ||||||
|  |                 description="Android Authenticator with SafetyNet Attestation" | ||||||
|  |             ) | ||||||
|         ) |         ) | ||||||
|         session = self.client.session |         session = self.client.session | ||||||
|         plan = FlowPlan(flow_pk=flow.pk.hex) |         plan = FlowPlan(flow_pk=flow.pk.hex) | ||||||
| @ -337,7 +339,9 @@ class AuthenticatorValidateStageWebAuthnTests(FlowTestCase): | |||||||
|             device_classes=[DeviceClasses.WEBAUTHN], |             device_classes=[DeviceClasses.WEBAUTHN], | ||||||
|         ) |         ) | ||||||
|         stage.webauthn_allowed_device_types.set( |         stage.webauthn_allowed_device_types.set( | ||||||
|             WebAuthnDeviceType.objects.filter(description="YubiKey 5 Series") |             WebAuthnDeviceType.objects.filter( | ||||||
|  |                 description="Android Authenticator with SafetyNet Attestation" | ||||||
|  |             ) | ||||||
|         ) |         ) | ||||||
|         session = self.client.session |         session = self.client.session | ||||||
|         plan = FlowPlan(flow_pk=flow.pk.hex) |         plan = FlowPlan(flow_pk=flow.pk.hex) | ||||||
|  | |||||||
										
											
												File diff suppressed because one or more lines are too long
											
										
									
								
							
										
											
												File diff suppressed because one or more lines are too long
											
										
									
								
							| @ -141,7 +141,9 @@ class TestAuthenticatorWebAuthnStage(FlowTestCase): | |||||||
|         """Test registration with restricted devices (fail)""" |         """Test registration with restricted devices (fail)""" | ||||||
|         webauthn_mds_import.delay(force=True).get() |         webauthn_mds_import.delay(force=True).get() | ||||||
|         self.stage.device_type_restrictions.set( |         self.stage.device_type_restrictions.set( | ||||||
|             WebAuthnDeviceType.objects.filter(description="YubiKey 5 Series") |             WebAuthnDeviceType.objects.filter( | ||||||
|  |                 description="Android Authenticator with SafetyNet Attestation" | ||||||
|  |             ) | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|         plan = FlowPlan(flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()]) |         plan = FlowPlan(flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()]) | ||||||
|  | |||||||
| @ -4,8 +4,6 @@ from uuid import uuid4 | |||||||
|  |  | ||||||
| from django.http import HttpRequest, HttpResponse | from django.http import HttpRequest, HttpResponse | ||||||
| from django.utils.timezone import now | from django.utils.timezone import now | ||||||
| from django.utils.translation import gettext as _ |  | ||||||
| from rest_framework.exceptions import ValidationError |  | ||||||
| from rest_framework.fields import CharField | from rest_framework.fields import CharField | ||||||
|  |  | ||||||
| from authentik.core.api.utils import PassiveSerializer | from authentik.core.api.utils import PassiveSerializer | ||||||
| @ -49,11 +47,6 @@ class ConsentChallengeResponse(ChallengeResponse): | |||||||
|     component = CharField(default="ak-stage-consent") |     component = CharField(default="ak-stage-consent") | ||||||
|     token = CharField(required=True) |     token = CharField(required=True) | ||||||
|  |  | ||||||
|     def validate_token(self, token: str): |  | ||||||
|         if token != self.stage.executor.request.session[SESSION_KEY_CONSENT_TOKEN]: |  | ||||||
|             raise ValidationError(_("Invalid consent token, re-showing prompt")) |  | ||||||
|         return token |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class ConsentStageView(ChallengeStageView): | class ConsentStageView(ChallengeStageView): | ||||||
|     """Simple consent checker.""" |     """Simple consent checker.""" | ||||||
| @ -127,6 +120,9 @@ class ConsentStageView(ChallengeStageView): | |||||||
|         return super().get(request, *args, **kwargs) |         return super().get(request, *args, **kwargs) | ||||||
|  |  | ||||||
|     def challenge_valid(self, response: ChallengeResponse) -> HttpResponse: |     def challenge_valid(self, response: ChallengeResponse) -> HttpResponse: | ||||||
|  |         if response.data["token"] != self.request.session[SESSION_KEY_CONSENT_TOKEN]: | ||||||
|  |             self.logger.info("Invalid consent token, re-showing prompt") | ||||||
|  |             return self.get(self.request) | ||||||
|         if self.should_always_prompt(): |         if self.should_always_prompt(): | ||||||
|             return self.executor.stage_ok() |             return self.executor.stage_ok() | ||||||
|         current_stage: ConsentStage = self.executor.current_stage |         current_stage: ConsentStage = self.executor.current_stage | ||||||
|  | |||||||
| @ -17,7 +17,6 @@ from authentik.flows.views.executor import SESSION_KEY_PLAN | |||||||
| from authentik.lib.generators import generate_id | from authentik.lib.generators import generate_id | ||||||
| from authentik.stages.consent.models import ConsentMode, ConsentStage, UserConsent | from authentik.stages.consent.models import ConsentMode, ConsentStage, UserConsent | ||||||
| from authentik.stages.consent.stage import ( | from authentik.stages.consent.stage import ( | ||||||
|     PLAN_CONTEXT_CONSENT_HEADER, |  | ||||||
|     PLAN_CONTEXT_CONSENT_PERMISSIONS, |     PLAN_CONTEXT_CONSENT_PERMISSIONS, | ||||||
|     SESSION_KEY_CONSENT_TOKEN, |     SESSION_KEY_CONSENT_TOKEN, | ||||||
| ) | ) | ||||||
| @ -34,40 +33,6 @@ class TestConsentStage(FlowTestCase): | |||||||
|             slug=generate_id(), |             slug=generate_id(), | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|     def test_mismatched_token(self): |  | ||||||
|         """Test incorrect token""" |  | ||||||
|         flow = create_test_flow(FlowDesignation.AUTHENTICATION) |  | ||||||
|         stage = ConsentStage.objects.create(name=generate_id(), mode=ConsentMode.ALWAYS_REQUIRE) |  | ||||||
|         binding = FlowStageBinding.objects.create(target=flow, stage=stage, order=2) |  | ||||||
|  |  | ||||||
|         plan = FlowPlan(flow_pk=flow.pk.hex, bindings=[binding], markers=[StageMarker()]) |  | ||||||
|         session = self.client.session |  | ||||||
|         session[SESSION_KEY_PLAN] = plan |  | ||||||
|         session.save() |  | ||||||
|         response = self.client.get( |  | ||||||
|             reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}), |  | ||||||
|         ) |  | ||||||
|         self.assertEqual(response.status_code, 200) |  | ||||||
|  |  | ||||||
|         session = self.client.session |  | ||||||
|         response = self.client.post( |  | ||||||
|             reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}), |  | ||||||
|             { |  | ||||||
|                 "token": generate_id(), |  | ||||||
|             }, |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|         self.assertEqual(response.status_code, 200) |  | ||||||
|         self.assertStageResponse( |  | ||||||
|             response, |  | ||||||
|             flow, |  | ||||||
|             component="ak-stage-consent", |  | ||||||
|             response_errors={ |  | ||||||
|                 "token": [{"string": "Invalid consent token, re-showing prompt", "code": "invalid"}] |  | ||||||
|             }, |  | ||||||
|         ) |  | ||||||
|         self.assertFalse(UserConsent.objects.filter(user=self.user).exists()) |  | ||||||
|  |  | ||||||
|     def test_always_required(self): |     def test_always_required(self): | ||||||
|         """Test always required consent""" |         """Test always required consent""" | ||||||
|         flow = create_test_flow(FlowDesignation.AUTHENTICATION) |         flow = create_test_flow(FlowDesignation.AUTHENTICATION) | ||||||
| @ -193,7 +158,6 @@ class TestConsentStage(FlowTestCase): | |||||||
|             context={ |             context={ | ||||||
|                 PLAN_CONTEXT_APPLICATION: self.application, |                 PLAN_CONTEXT_APPLICATION: self.application, | ||||||
|                 PLAN_CONTEXT_CONSENT_PERMISSIONS: [PermissionDict(id="foo", name="foo-desc")], |                 PLAN_CONTEXT_CONSENT_PERMISSIONS: [PermissionDict(id="foo", name="foo-desc")], | ||||||
|                 PLAN_CONTEXT_CONSENT_HEADER: "test header", |  | ||||||
|             }, |             }, | ||||||
|         ) |         ) | ||||||
|         session = self.client.session |         session = self.client.session | ||||||
|  | |||||||
| @ -1,38 +0,0 @@ | |||||||
| from base64 import b64encode |  | ||||||
| from copy import deepcopy |  | ||||||
| from pickle import dumps  # nosec |  | ||||||
|  |  | ||||||
| from django.utils.translation import gettext as _ |  | ||||||
|  |  | ||||||
| from authentik.flows.models import FlowToken, in_memory_stage |  | ||||||
| from authentik.flows.planner import PLAN_CONTEXT_IS_RESTORED, FlowPlan |  | ||||||
| from authentik.stages.consent.stage import PLAN_CONTEXT_CONSENT_HEADER, ConsentStageView |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def pickle_flow_token_for_email(plan: FlowPlan): |  | ||||||
|     """Insert a consent stage into the flow plan and pickle it for a FlowToken, |  | ||||||
|     to be sent via Email. This is to prevent automated email scanners, which sometimes |  | ||||||
|     open links in emails in a full browser from breaking the link.""" |  | ||||||
|     plan_copy = deepcopy(plan) |  | ||||||
|     plan_copy.insert_stage(in_memory_stage(EmailTokenRevocationConsentStageView), index=0) |  | ||||||
|     plan_copy.context[PLAN_CONTEXT_CONSENT_HEADER] = _("Continue to confirm this email address.") |  | ||||||
|     data = dumps(plan_copy) |  | ||||||
|     return b64encode(data).decode() |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class EmailTokenRevocationConsentStageView(ConsentStageView): |  | ||||||
|  |  | ||||||
|     def get(self, request, *args, **kwargs): |  | ||||||
|         token: FlowToken = self.executor.plan.context[PLAN_CONTEXT_IS_RESTORED] |  | ||||||
|         try: |  | ||||||
|             token.refresh_from_db() |  | ||||||
|         except FlowToken.DoesNotExist: |  | ||||||
|             return self.executor.stage_invalid( |  | ||||||
|                 _("Link was already used, please request a new link.") |  | ||||||
|             ) |  | ||||||
|         return super().get(request, *args, **kwargs) |  | ||||||
|  |  | ||||||
|     def challenge_valid(self, response): |  | ||||||
|         token: FlowToken = self.executor.plan.context[PLAN_CONTEXT_IS_RESTORED] |  | ||||||
|         token.delete() |  | ||||||
|         return super().challenge_valid(response) |  | ||||||
| @ -23,7 +23,6 @@ from authentik.flows.stage import ChallengeStageView | |||||||
| from authentik.flows.views.executor import QS_KEY_TOKEN, QS_QUERY | from authentik.flows.views.executor import QS_KEY_TOKEN, QS_QUERY | ||||||
| from authentik.lib.utils.errors import exception_to_string | from authentik.lib.utils.errors import exception_to_string | ||||||
| from authentik.lib.utils.time import timedelta_from_string | from authentik.lib.utils.time import timedelta_from_string | ||||||
| from authentik.stages.email.flow import pickle_flow_token_for_email |  | ||||||
| from authentik.stages.email.models import EmailStage | from authentik.stages.email.models import EmailStage | ||||||
| from authentik.stages.email.tasks import send_mails | from authentik.stages.email.tasks import send_mails | ||||||
| from authentik.stages.email.utils import TemplateEmailMessage | from authentik.stages.email.utils import TemplateEmailMessage | ||||||
| @ -87,8 +86,7 @@ class EmailStageView(ChallengeStageView): | |||||||
|                 user=pending_user, |                 user=pending_user, | ||||||
|                 identifier=identifier, |                 identifier=identifier, | ||||||
|                 flow=self.executor.flow, |                 flow=self.executor.flow, | ||||||
|                 _plan=pickle_flow_token_for_email(self.executor.plan), |                 _plan=FlowToken.pickle(self.executor.plan), | ||||||
|                 revoke_on_execution=False, |  | ||||||
|             ) |             ) | ||||||
|         token = tokens.first() |         token = tokens.first() | ||||||
|         # Check if token is expired and rotate key if so |         # Check if token is expired and rotate key if so | ||||||
|  | |||||||
| @ -174,5 +174,5 @@ class TestEmailStageSending(FlowTestCase): | |||||||
|                 response = self.client.post(url) |                 response = self.client.post(url) | ||||||
|             response = self.client.post(url) |             response = self.client.post(url) | ||||||
|             self.assertEqual(response.status_code, 200) |             self.assertEqual(response.status_code, 200) | ||||||
|             self.assertGreaterEqual(len(mail.outbox), 1) |             self.assertTrue(len(mail.outbox) >= 1) | ||||||
|             self.assertEqual(mail.outbox[0].subject, "authentik") |             self.assertEqual(mail.outbox[0].subject, "authentik") | ||||||
|  | |||||||
| @ -17,7 +17,6 @@ from authentik.flows.tests import FlowTestCase | |||||||
| from authentik.flows.views.executor import QS_KEY_TOKEN, SESSION_KEY_PLAN, FlowExecutorView | from authentik.flows.views.executor import QS_KEY_TOKEN, SESSION_KEY_PLAN, FlowExecutorView | ||||||
| from authentik.lib.config import CONFIG | from authentik.lib.config import CONFIG | ||||||
| from authentik.lib.generators import generate_id | from authentik.lib.generators import generate_id | ||||||
| from authentik.stages.consent.stage import SESSION_KEY_CONSENT_TOKEN |  | ||||||
| from authentik.stages.email.models import EmailStage | from authentik.stages.email.models import EmailStage | ||||||
| from authentik.stages.email.stage import PLAN_CONTEXT_EMAIL_OVERRIDE, EmailStageView | from authentik.stages.email.stage import PLAN_CONTEXT_EMAIL_OVERRIDE, EmailStageView | ||||||
|  |  | ||||||
| @ -161,17 +160,6 @@ class TestEmailStage(FlowTestCase): | |||||||
|                     kwargs={"flow_slug": self.flow.slug}, |                     kwargs={"flow_slug": self.flow.slug}, | ||||||
|                 ) |                 ) | ||||||
|             ) |             ) | ||||||
|             self.assertStageResponse(response, self.flow, component="ak-stage-consent") |  | ||||||
|             response = self.client.post( |  | ||||||
|                 reverse( |  | ||||||
|                     "authentik_api:flow-executor", |  | ||||||
|                     kwargs={"flow_slug": self.flow.slug}, |  | ||||||
|                 ), |  | ||||||
|                 data={ |  | ||||||
|                     "token": self.client.session[SESSION_KEY_CONSENT_TOKEN], |  | ||||||
|                 }, |  | ||||||
|                 follow=True, |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|             self.assertEqual(response.status_code, 200) |             self.assertEqual(response.status_code, 200) | ||||||
|             self.assertStageRedirects(response, reverse("authentik_core:root-redirect")) |             self.assertStageRedirects(response, reverse("authentik_core:root-redirect")) | ||||||
| @ -194,7 +182,6 @@ class TestEmailStage(FlowTestCase): | |||||||
|         # Set flow token user to a different user |         # Set flow token user to a different user | ||||||
|         token: FlowToken = FlowToken.objects.get(user=self.user) |         token: FlowToken = FlowToken.objects.get(user=self.user) | ||||||
|         token.user = create_test_admin_user() |         token.user = create_test_admin_user() | ||||||
|         token.revoke_on_execution = True |  | ||||||
|         token.save() |         token.save() | ||||||
|  |  | ||||||
|         with patch("authentik.flows.views.executor.FlowExecutorView.cancel", MagicMock()): |         with patch("authentik.flows.views.executor.FlowExecutorView.cancel", MagicMock()): | ||||||
|  | |||||||
| @ -11,7 +11,7 @@ from rest_framework.fields import BooleanField, CharField | |||||||
| from authentik.core.models import Session, User | from authentik.core.models import Session, User | ||||||
| from authentik.events.middleware import audit_ignore | from authentik.events.middleware import audit_ignore | ||||||
| from authentik.flows.challenge import ChallengeResponse, WithUserInfoChallenge | from authentik.flows.challenge import ChallengeResponse, WithUserInfoChallenge | ||||||
| from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER | from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER, PLAN_CONTEXT_SOURCE | ||||||
| from authentik.flows.stage import ChallengeStageView | from authentik.flows.stage import ChallengeStageView | ||||||
| from authentik.lib.utils.time import timedelta_from_string | from authentik.lib.utils.time import timedelta_from_string | ||||||
| from authentik.root.middleware import ClientIPMiddleware | from authentik.root.middleware import ClientIPMiddleware | ||||||
| @ -108,6 +108,10 @@ class UserLoginStageView(ChallengeStageView): | |||||||
|             flow_slug=self.executor.flow.slug, |             flow_slug=self.executor.flow.slug, | ||||||
|             session_duration=delta, |             session_duration=delta, | ||||||
|         ) |         ) | ||||||
|  |         # Only show success message if we don't have a source in the flow | ||||||
|  |         # as sources show their own success messages | ||||||
|  |         if not self.executor.plan.context.get(PLAN_CONTEXT_SOURCE, None): | ||||||
|  |             messages.success(self.request, _("Successfully logged in!")) | ||||||
|         if self.executor.current_stage.terminate_other_sessions: |         if self.executor.current_stage.terminate_other_sessions: | ||||||
|             Session.objects.filter( |             Session.objects.filter( | ||||||
|                 authenticatedsession__user=user, |                 authenticatedsession__user=user, | ||||||
|  | |||||||
| @ -2,7 +2,7 @@ | |||||||
|     "$schema": "http://json-schema.org/draft-07/schema", |     "$schema": "http://json-schema.org/draft-07/schema", | ||||||
|     "$id": "https://goauthentik.io/blueprints/schema.json", |     "$id": "https://goauthentik.io/blueprints/schema.json", | ||||||
|     "type": "object", |     "type": "object", | ||||||
|     "title": "authentik 2025.6.0 Blueprint schema", |     "title": "authentik 2025.4.1 Blueprint schema", | ||||||
|     "required": [ |     "required": [ | ||||||
|         "version", |         "version", | ||||||
|         "entries" |         "entries" | ||||||
| @ -3921,46 +3921,6 @@ | |||||||
|                             } |                             } | ||||||
|                         } |                         } | ||||||
|                     }, |                     }, | ||||||
|                     { |  | ||||||
|                         "type": "object", |  | ||||||
|                         "required": [ |  | ||||||
|                             "model", |  | ||||||
|                             "identifiers" |  | ||||||
|                         ], |  | ||||||
|                         "properties": { |  | ||||||
|                             "model": { |  | ||||||
|                                 "const": "authentik_stages_mtls.mutualtlsstage" |  | ||||||
|                             }, |  | ||||||
|                             "id": { |  | ||||||
|                                 "type": "string" |  | ||||||
|                             }, |  | ||||||
|                             "state": { |  | ||||||
|                                 "type": "string", |  | ||||||
|                                 "enum": [ |  | ||||||
|                                     "absent", |  | ||||||
|                                     "present", |  | ||||||
|                                     "created", |  | ||||||
|                                     "must_created" |  | ||||||
|                                 ], |  | ||||||
|                                 "default": "present" |  | ||||||
|                             }, |  | ||||||
|                             "conditions": { |  | ||||||
|                                 "type": "array", |  | ||||||
|                                 "items": { |  | ||||||
|                                     "type": "boolean" |  | ||||||
|                                 } |  | ||||||
|                             }, |  | ||||||
|                             "permissions": { |  | ||||||
|                                 "$ref": "#/$defs/model_authentik_stages_mtls.mutualtlsstage_permissions" |  | ||||||
|                             }, |  | ||||||
|                             "attrs": { |  | ||||||
|                                 "$ref": "#/$defs/model_authentik_stages_mtls.mutualtlsstage" |  | ||||||
|                             }, |  | ||||||
|                             "identifiers": { |  | ||||||
|                                 "$ref": "#/$defs/model_authentik_stages_mtls.mutualtlsstage" |  | ||||||
|                             } |  | ||||||
|                         } |  | ||||||
|                     }, |  | ||||||
|                     { |                     { | ||||||
|                         "type": "object", |                         "type": "object", | ||||||
|                         "required": [ |                         "required": [ | ||||||
| @ -4907,7 +4867,6 @@ | |||||||
|                         "authentik.enterprise.providers.microsoft_entra", |                         "authentik.enterprise.providers.microsoft_entra", | ||||||
|                         "authentik.enterprise.providers.ssf", |                         "authentik.enterprise.providers.ssf", | ||||||
|                         "authentik.enterprise.stages.authenticator_endpoint_gdtc", |                         "authentik.enterprise.stages.authenticator_endpoint_gdtc", | ||||||
|                         "authentik.enterprise.stages.mtls", |  | ||||||
|                         "authentik.enterprise.stages.source", |                         "authentik.enterprise.stages.source", | ||||||
|                         "authentik.events" |                         "authentik.events" | ||||||
|                     ], |                     ], | ||||||
| @ -5018,7 +4977,6 @@ | |||||||
|                         "authentik_providers_microsoft_entra.microsoftentraprovidermapping", |                         "authentik_providers_microsoft_entra.microsoftentraprovidermapping", | ||||||
|                         "authentik_providers_ssf.ssfprovider", |                         "authentik_providers_ssf.ssfprovider", | ||||||
|                         "authentik_stages_authenticator_endpoint_gdtc.authenticatorendpointgdtcstage", |                         "authentik_stages_authenticator_endpoint_gdtc.authenticatorendpointgdtcstage", | ||||||
|                         "authentik_stages_mtls.mutualtlsstage", |  | ||||||
|                         "authentik_stages_source.sourcestage", |                         "authentik_stages_source.sourcestage", | ||||||
|                         "authentik_events.event", |                         "authentik_events.event", | ||||||
|                         "authentik_events.notificationtransport", |                         "authentik_events.notificationtransport", | ||||||
| @ -7519,11 +7477,6 @@ | |||||||
|                             "authentik_stages_invitation.delete_invitationstage", |                             "authentik_stages_invitation.delete_invitationstage", | ||||||
|                             "authentik_stages_invitation.view_invitation", |                             "authentik_stages_invitation.view_invitation", | ||||||
|                             "authentik_stages_invitation.view_invitationstage", |                             "authentik_stages_invitation.view_invitationstage", | ||||||
|                             "authentik_stages_mtls.add_mutualtlsstage", |  | ||||||
|                             "authentik_stages_mtls.change_mutualtlsstage", |  | ||||||
|                             "authentik_stages_mtls.delete_mutualtlsstage", |  | ||||||
|                             "authentik_stages_mtls.pass_outpost_certificate", |  | ||||||
|                             "authentik_stages_mtls.view_mutualtlsstage", |  | ||||||
|                             "authentik_stages_password.add_passwordstage", |                             "authentik_stages_password.add_passwordstage", | ||||||
|                             "authentik_stages_password.change_passwordstage", |                             "authentik_stages_password.change_passwordstage", | ||||||
|                             "authentik_stages_password.delete_passwordstage", |                             "authentik_stages_password.delete_passwordstage", | ||||||
| @ -8147,12 +8100,6 @@ | |||||||
|                     "title": "Group membership field", |                     "title": "Group membership field", | ||||||
|                     "description": "Field which contains members of a group." |                     "description": "Field which contains members of a group." | ||||||
|                 }, |                 }, | ||||||
|                 "user_membership_attribute": { |  | ||||||
|                     "type": "string", |  | ||||||
|                     "minLength": 1, |  | ||||||
|                     "title": "User membership attribute", |  | ||||||
|                     "description": "Attribute which matches the value of `group_membership_field`." |  | ||||||
|                 }, |  | ||||||
|                 "object_uniqueness_field": { |                 "object_uniqueness_field": { | ||||||
|                     "type": "string", |                     "type": "string", | ||||||
|                     "minLength": 1, |                     "minLength": 1, | ||||||
| @ -8186,11 +8133,6 @@ | |||||||
|                     "type": "boolean", |                     "type": "boolean", | ||||||
|                     "title": "Lookup groups from user", |                     "title": "Lookup groups from user", | ||||||
|                     "description": "Lookup group membership based on a user attribute instead of a group attribute. This allows nested group resolution on systems like FreeIPA and Active Directory" |                     "description": "Lookup group membership based on a user attribute instead of a group attribute. This allows nested group resolution on systems like FreeIPA and Active Directory" | ||||||
|                 }, |  | ||||||
|                 "delete_not_found_objects": { |  | ||||||
|                     "type": "boolean", |  | ||||||
|                     "title": "Delete not found objects", |  | ||||||
|                     "description": "Delete authentik users and groups which were previously supplied by this source, but are now missing from it." |  | ||||||
|                 } |                 } | ||||||
|             }, |             }, | ||||||
|             "required": [] |             "required": [] | ||||||
| @ -13480,16 +13422,6 @@ | |||||||
|                     "title": "Web certificate", |                     "title": "Web certificate", | ||||||
|                     "description": "Web Certificate used by the authentik Core webserver." |                     "description": "Web Certificate used by the authentik Core webserver." | ||||||
|                 }, |                 }, | ||||||
|                 "client_certificates": { |  | ||||||
|                     "type": "array", |  | ||||||
|                     "items": { |  | ||||||
|                         "type": "string", |  | ||||||
|                         "format": "uuid", |  | ||||||
|                         "description": "Certificates used for client authentication." |  | ||||||
|                     }, |  | ||||||
|                     "title": "Client certificates", |  | ||||||
|                     "description": "Certificates used for client authentication." |  | ||||||
|                 }, |  | ||||||
|                 "attributes": { |                 "attributes": { | ||||||
|                     "type": "object", |                     "type": "object", | ||||||
|                     "additionalProperties": true, |                     "additionalProperties": true, | ||||||
| @ -14253,11 +14185,6 @@ | |||||||
|                             "authentik_stages_invitation.delete_invitationstage", |                             "authentik_stages_invitation.delete_invitationstage", | ||||||
|                             "authentik_stages_invitation.view_invitation", |                             "authentik_stages_invitation.view_invitation", | ||||||
|                             "authentik_stages_invitation.view_invitationstage", |                             "authentik_stages_invitation.view_invitationstage", | ||||||
|                             "authentik_stages_mtls.add_mutualtlsstage", |  | ||||||
|                             "authentik_stages_mtls.change_mutualtlsstage", |  | ||||||
|                             "authentik_stages_mtls.delete_mutualtlsstage", |  | ||||||
|                             "authentik_stages_mtls.pass_outpost_certificate", |  | ||||||
|                             "authentik_stages_mtls.view_mutualtlsstage", |  | ||||||
|                             "authentik_stages_password.add_passwordstage", |                             "authentik_stages_password.add_passwordstage", | ||||||
|                             "authentik_stages_password.change_passwordstage", |                             "authentik_stages_password.change_passwordstage", | ||||||
|                             "authentik_stages_password.delete_passwordstage", |                             "authentik_stages_password.delete_passwordstage", | ||||||
| @ -15161,161 +15088,6 @@ | |||||||
|                 } |                 } | ||||||
|             } |             } | ||||||
|         }, |         }, | ||||||
|         "model_authentik_stages_mtls.mutualtlsstage": { |  | ||||||
|             "type": "object", |  | ||||||
|             "properties": { |  | ||||||
|                 "name": { |  | ||||||
|                     "type": "string", |  | ||||||
|                     "minLength": 1, |  | ||||||
|                     "title": "Name" |  | ||||||
|                 }, |  | ||||||
|                 "flow_set": { |  | ||||||
|                     "type": "array", |  | ||||||
|                     "items": { |  | ||||||
|                         "type": "object", |  | ||||||
|                         "properties": { |  | ||||||
|                             "name": { |  | ||||||
|                                 "type": "string", |  | ||||||
|                                 "minLength": 1, |  | ||||||
|                                 "title": "Name" |  | ||||||
|                             }, |  | ||||||
|                             "slug": { |  | ||||||
|                                 "type": "string", |  | ||||||
|                                 "maxLength": 50, |  | ||||||
|                                 "minLength": 1, |  | ||||||
|                                 "pattern": "^[-a-zA-Z0-9_]+$", |  | ||||||
|                                 "title": "Slug", |  | ||||||
|                                 "description": "Visible in the URL." |  | ||||||
|                             }, |  | ||||||
|                             "title": { |  | ||||||
|                                 "type": "string", |  | ||||||
|                                 "minLength": 1, |  | ||||||
|                                 "title": "Title", |  | ||||||
|                                 "description": "Shown as the Title in Flow pages." |  | ||||||
|                             }, |  | ||||||
|                             "designation": { |  | ||||||
|                                 "type": "string", |  | ||||||
|                                 "enum": [ |  | ||||||
|                                     "authentication", |  | ||||||
|                                     "authorization", |  | ||||||
|                                     "invalidation", |  | ||||||
|                                     "enrollment", |  | ||||||
|                                     "unenrollment", |  | ||||||
|                                     "recovery", |  | ||||||
|                                     "stage_configuration" |  | ||||||
|                                 ], |  | ||||||
|                                 "title": "Designation", |  | ||||||
|                                 "description": "Decides what this Flow is used for. For example, the Authentication flow is redirect to when an un-authenticated user visits authentik." |  | ||||||
|                             }, |  | ||||||
|                             "policy_engine_mode": { |  | ||||||
|                                 "type": "string", |  | ||||||
|                                 "enum": [ |  | ||||||
|                                     "all", |  | ||||||
|                                     "any" |  | ||||||
|                                 ], |  | ||||||
|                                 "title": "Policy engine mode" |  | ||||||
|                             }, |  | ||||||
|                             "compatibility_mode": { |  | ||||||
|                                 "type": "boolean", |  | ||||||
|                                 "title": "Compatibility mode", |  | ||||||
|                                 "description": "Enable compatibility mode, increases compatibility with password managers on mobile devices." |  | ||||||
|                             }, |  | ||||||
|                             "layout": { |  | ||||||
|                                 "type": "string", |  | ||||||
|                                 "enum": [ |  | ||||||
|                                     "stacked", |  | ||||||
|                                     "content_left", |  | ||||||
|                                     "content_right", |  | ||||||
|                                     "sidebar_left", |  | ||||||
|                                     "sidebar_right" |  | ||||||
|                                 ], |  | ||||||
|                                 "title": "Layout" |  | ||||||
|                             }, |  | ||||||
|                             "denied_action": { |  | ||||||
|                                 "type": "string", |  | ||||||
|                                 "enum": [ |  | ||||||
|                                     "message_continue", |  | ||||||
|                                     "message", |  | ||||||
|                                     "continue" |  | ||||||
|                                 ], |  | ||||||
|                                 "title": "Denied action", |  | ||||||
|                                 "description": "Configure what should happen when a flow denies access to a user." |  | ||||||
|                             } |  | ||||||
|                         }, |  | ||||||
|                         "required": [ |  | ||||||
|                             "name", |  | ||||||
|                             "slug", |  | ||||||
|                             "title", |  | ||||||
|                             "designation" |  | ||||||
|                         ] |  | ||||||
|                     }, |  | ||||||
|                     "title": "Flow set" |  | ||||||
|                 }, |  | ||||||
|                 "mode": { |  | ||||||
|                     "type": "string", |  | ||||||
|                     "enum": [ |  | ||||||
|                         "optional", |  | ||||||
|                         "required" |  | ||||||
|                     ], |  | ||||||
|                     "title": "Mode" |  | ||||||
|                 }, |  | ||||||
|                 "certificate_authorities": { |  | ||||||
|                     "type": "array", |  | ||||||
|                     "items": { |  | ||||||
|                         "type": "string", |  | ||||||
|                         "format": "uuid", |  | ||||||
|                         "description": "Configure certificate authorities to validate the certificate against. This option has a higher priority than the `client_certificate` option on `Brand`." |  | ||||||
|                     }, |  | ||||||
|                     "title": "Certificate authorities", |  | ||||||
|                     "description": "Configure certificate authorities to validate the certificate against. This option has a higher priority than the `client_certificate` option on `Brand`." |  | ||||||
|                 }, |  | ||||||
|                 "cert_attribute": { |  | ||||||
|                     "type": "string", |  | ||||||
|                     "enum": [ |  | ||||||
|                         "subject", |  | ||||||
|                         "common_name", |  | ||||||
|                         "email" |  | ||||||
|                     ], |  | ||||||
|                     "title": "Cert attribute" |  | ||||||
|                 }, |  | ||||||
|                 "user_attribute": { |  | ||||||
|                     "type": "string", |  | ||||||
|                     "enum": [ |  | ||||||
|                         "username", |  | ||||||
|                         "email" |  | ||||||
|                     ], |  | ||||||
|                     "title": "User attribute" |  | ||||||
|                 } |  | ||||||
|             }, |  | ||||||
|             "required": [] |  | ||||||
|         }, |  | ||||||
|         "model_authentik_stages_mtls.mutualtlsstage_permissions": { |  | ||||||
|             "type": "array", |  | ||||||
|             "items": { |  | ||||||
|                 "type": "object", |  | ||||||
|                 "required": [ |  | ||||||
|                     "permission" |  | ||||||
|                 ], |  | ||||||
|                 "properties": { |  | ||||||
|                     "permission": { |  | ||||||
|                         "type": "string", |  | ||||||
|                         "enum": [ |  | ||||||
|                             "pass_outpost_certificate", |  | ||||||
|                             "add_mutualtlsstage", |  | ||||||
|                             "change_mutualtlsstage", |  | ||||||
|                             "delete_mutualtlsstage", |  | ||||||
|                             "view_mutualtlsstage" |  | ||||||
|                         ] |  | ||||||
|                     }, |  | ||||||
|                     "user": { |  | ||||||
|                         "type": "integer" |  | ||||||
|                     }, |  | ||||||
|                     "role": { |  | ||||||
|                         "type": "string" |  | ||||||
|                     } |  | ||||||
|                 } |  | ||||||
|             } |  | ||||||
|         }, |  | ||||||
|         "model_authentik_stages_source.sourcestage": { |         "model_authentik_stages_source.sourcestage": { | ||||||
|             "type": "object", |             "type": "object", | ||||||
|             "properties": { |             "properties": { | ||||||
|  | |||||||
| @ -19,6 +19,7 @@ import ( | |||||||
| 	sentryutils "goauthentik.io/internal/utils/sentry" | 	sentryutils "goauthentik.io/internal/utils/sentry" | ||||||
| 	webutils "goauthentik.io/internal/utils/web" | 	webutils "goauthentik.io/internal/utils/web" | ||||||
| 	"goauthentik.io/internal/web" | 	"goauthentik.io/internal/web" | ||||||
|  | 	"goauthentik.io/internal/web/brand_tls" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| var rootCmd = &cobra.Command{ | var rootCmd = &cobra.Command{ | ||||||
| @ -66,12 +67,12 @@ var rootCmd = &cobra.Command{ | |||||||
| 		} | 		} | ||||||
|  |  | ||||||
| 		ws := web.NewWebServer() | 		ws := web.NewWebServer() | ||||||
| 		ws.Core().AddHealthyCallback(func() { | 		ws.Core().HealthyCallback = func() { | ||||||
| 			if config.Get().Outposts.DisableEmbeddedOutpost { | 			if config.Get().Outposts.DisableEmbeddedOutpost { | ||||||
| 				return | 				return | ||||||
| 			} | 			} | ||||||
| 			go attemptProxyStart(ws, u) | 			go attemptProxyStart(ws, u) | ||||||
| 		}) | 		} | ||||||
| 		ws.Start() | 		ws.Start() | ||||||
| 		<-ex | 		<-ex | ||||||
| 		l.Info("shutting down webserver") | 		l.Info("shutting down webserver") | ||||||
| @ -94,8 +95,13 @@ func attemptProxyStart(ws *web.WebServer, u *url.URL) { | |||||||
| 			} | 			} | ||||||
| 			continue | 			continue | ||||||
| 		} | 		} | ||||||
|  | 		// Init brand_tls here too since it requires an API Client, | ||||||
|  | 		// so we just reuse the same one as the outpost uses | ||||||
|  | 		tw := brand_tls.NewWatcher(ac.Client) | ||||||
|  | 		go tw.Start() | ||||||
|  | 		ws.BrandTLS = tw | ||||||
| 		ac.AddRefreshHandler(func() { | 		ac.AddRefreshHandler(func() { | ||||||
| 			ws.BrandTLS.Check() | 			tw.Check() | ||||||
| 		}) | 		}) | ||||||
|  |  | ||||||
| 		srv := proxyv2.NewProxyServer(ac) | 		srv := proxyv2.NewProxyServer(ac) | ||||||
|  | |||||||
| @ -31,7 +31,7 @@ services: | |||||||
|     volumes: |     volumes: | ||||||
|       - redis:/data |       - redis:/data | ||||||
|   server: |   server: | ||||||
|     image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2025.6.0} |     image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2025.4.1} | ||||||
|     restart: unless-stopped |     restart: unless-stopped | ||||||
|     command: server |     command: server | ||||||
|     environment: |     environment: | ||||||
| @ -55,7 +55,7 @@ services: | |||||||
|       redis: |       redis: | ||||||
|         condition: service_healthy |         condition: service_healthy | ||||||
|   worker: |   worker: | ||||||
|     image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2025.6.0} |     image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2025.4.1} | ||||||
|     restart: unless-stopped |     restart: unless-stopped | ||||||
|     command: worker |     command: worker | ||||||
|     environment: |     environment: | ||||||
|  | |||||||
							
								
								
									
										4
									
								
								go.mod
									
									
									
									
									
								
							
							
						
						
									
										4
									
								
								go.mod
									
									
									
									
									
								
							| @ -21,13 +21,13 @@ require ( | |||||||
| 	github.com/nmcclain/asn1-ber v0.0.0-20170104154839-2661553a0484 | 	github.com/nmcclain/asn1-ber v0.0.0-20170104154839-2661553a0484 | ||||||
| 	github.com/pires/go-proxyproto v0.8.1 | 	github.com/pires/go-proxyproto v0.8.1 | ||||||
| 	github.com/prometheus/client_golang v1.22.0 | 	github.com/prometheus/client_golang v1.22.0 | ||||||
| 	github.com/redis/go-redis/v9 v9.9.0 | 	github.com/redis/go-redis/v9 v9.8.0 | ||||||
| 	github.com/sethvargo/go-envconfig v1.3.0 | 	github.com/sethvargo/go-envconfig v1.3.0 | ||||||
| 	github.com/sirupsen/logrus v1.9.3 | 	github.com/sirupsen/logrus v1.9.3 | ||||||
| 	github.com/spf13/cobra v1.9.1 | 	github.com/spf13/cobra v1.9.1 | ||||||
| 	github.com/stretchr/testify v1.10.0 | 	github.com/stretchr/testify v1.10.0 | ||||||
| 	github.com/wwt/guac v1.3.2 | 	github.com/wwt/guac v1.3.2 | ||||||
| 	goauthentik.io/api/v3 v3.2025041.4 | 	goauthentik.io/api/v3 v3.2025041.1 | ||||||
| 	golang.org/x/exp v0.0.0-20230210204819-062eb4c674ab | 	golang.org/x/exp v0.0.0-20230210204819-062eb4c674ab | ||||||
| 	golang.org/x/oauth2 v0.30.0 | 	golang.org/x/oauth2 v0.30.0 | ||||||
| 	golang.org/x/sync v0.14.0 | 	golang.org/x/sync v0.14.0 | ||||||
|  | |||||||
							
								
								
									
										8
									
								
								go.sum
									
									
									
									
									
								
							
							
						
						
									
										8
									
								
								go.sum
									
									
									
									
									
								
							| @ -245,8 +245,8 @@ github.com/prometheus/common v0.62.0 h1:xasJaQlnWAeyHdUBeGjXmutelfJHWMRr+Fg4QszZ | |||||||
| github.com/prometheus/common v0.62.0/go.mod h1:vyBcEuLSvWos9B1+CyL7JZ2up+uFzXhkqml0W5zIY1I= | github.com/prometheus/common v0.62.0/go.mod h1:vyBcEuLSvWos9B1+CyL7JZ2up+uFzXhkqml0W5zIY1I= | ||||||
| github.com/prometheus/procfs v0.15.1 h1:YagwOFzUgYfKKHX6Dr+sHT7km/hxC76UB0learggepc= | github.com/prometheus/procfs v0.15.1 h1:YagwOFzUgYfKKHX6Dr+sHT7km/hxC76UB0learggepc= | ||||||
| github.com/prometheus/procfs v0.15.1/go.mod h1:fB45yRUv8NstnjriLhBQLuOUt+WW4BsoGhij/e3PBqk= | github.com/prometheus/procfs v0.15.1/go.mod h1:fB45yRUv8NstnjriLhBQLuOUt+WW4BsoGhij/e3PBqk= | ||||||
| github.com/redis/go-redis/v9 v9.9.0 h1:URbPQ4xVQSQhZ27WMQVmZSo3uT3pL+4IdHVcYq2nVfM= | github.com/redis/go-redis/v9 v9.8.0 h1:q3nRvjrlge/6UD7eTu/DSg2uYiU2mCL0G/uzBWqhicI= | ||||||
| github.com/redis/go-redis/v9 v9.9.0/go.mod h1:huWgSWd8mW6+m0VPhJjSSQ+d6Nh1VICQ6Q5lHuCH/Iw= | github.com/redis/go-redis/v9 v9.8.0/go.mod h1:huWgSWd8mW6+m0VPhJjSSQ+d6Nh1VICQ6Q5lHuCH/Iw= | ||||||
| github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= | github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= | ||||||
| github.com/rogpeppe/go-internal v1.11.0 h1:cWPaGQEPrBb5/AsnsZesgZZ9yb1OQ+GOISoDNXVBh4M= | github.com/rogpeppe/go-internal v1.11.0 h1:cWPaGQEPrBb5/AsnsZesgZZ9yb1OQ+GOISoDNXVBh4M= | ||||||
| github.com/rogpeppe/go-internal v1.11.0/go.mod h1:ddIwULY96R17DhadqLgMfk9H9tvdUzkipdSkR5nkCZA= | github.com/rogpeppe/go-internal v1.11.0/go.mod h1:ddIwULY96R17DhadqLgMfk9H9tvdUzkipdSkR5nkCZA= | ||||||
| @ -290,8 +290,8 @@ go.opentelemetry.io/otel/trace v1.24.0 h1:CsKnnL4dUAr/0llH9FKuc698G04IrpWV0MQA/Y | |||||||
| go.opentelemetry.io/otel/trace v1.24.0/go.mod h1:HPc3Xr/cOApsBI154IU0OI0HJexz+aw5uPdbs3UCjNU= | go.opentelemetry.io/otel/trace v1.24.0/go.mod h1:HPc3Xr/cOApsBI154IU0OI0HJexz+aw5uPdbs3UCjNU= | ||||||
| go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= | go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= | ||||||
| go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= | go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= | ||||||
| goauthentik.io/api/v3 v3.2025041.4 h1:cGqzWYnUHrWDoaXWDpIL/kWnX9sFrIhkYDye0P0OEAo= | goauthentik.io/api/v3 v3.2025041.1 h1:GAN6AoTmfnCGgx1SyM07jP4/LR/T3rkTEyShSBd3Co8= | ||||||
| goauthentik.io/api/v3 v3.2025041.4/go.mod h1:zz+mEZg8rY/7eEjkMGWJ2DnGqk+zqxuybGCGrR2O4Kw= | goauthentik.io/api/v3 v3.2025041.1/go.mod h1:zz+mEZg8rY/7eEjkMGWJ2DnGqk+zqxuybGCGrR2O4Kw= | ||||||
| golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= | golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= | ||||||
| golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= | golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= | ||||||
| golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= | golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= | ||||||
|  | |||||||
| @ -21,16 +21,12 @@ func FullVersion() string { | |||||||
| 	return ver | 	return ver | ||||||
| } | } | ||||||
|  |  | ||||||
| func UserAgentOutpost() string { | func OutpostUserAgent() string { | ||||||
| 	return fmt.Sprintf("goauthentik.io/outpost/%s", FullVersion()) | 	return fmt.Sprintf("goauthentik.io/outpost/%s", FullVersion()) | ||||||
| } | } | ||||||
|  |  | ||||||
| func UserAgentIPC() string { |  | ||||||
| 	return fmt.Sprintf("goauthentik.io/ipc/%s", FullVersion()) |  | ||||||
| } |  | ||||||
|  |  | ||||||
| func UserAgent() string { | func UserAgent() string { | ||||||
| 	return fmt.Sprintf("authentik@%s", FullVersion()) | 	return fmt.Sprintf("authentik@%s", FullVersion()) | ||||||
| } | } | ||||||
|  |  | ||||||
| const VERSION = "2025.6.0" | const VERSION = "2025.4.1" | ||||||
|  | |||||||
| @ -18,8 +18,8 @@ import ( | |||||||
| ) | ) | ||||||
|  |  | ||||||
| type GoUnicorn struct { | type GoUnicorn struct { | ||||||
| 	Healthcheck      func() bool | 	Healthcheck     func() bool | ||||||
| 	healthyCallbacks []func() | 	HealthyCallback func() | ||||||
|  |  | ||||||
| 	log     *log.Entry | 	log     *log.Entry | ||||||
| 	p       *exec.Cmd | 	p       *exec.Cmd | ||||||
| @ -32,12 +32,12 @@ type GoUnicorn struct { | |||||||
| func New(healthcheck func() bool) *GoUnicorn { | func New(healthcheck func() bool) *GoUnicorn { | ||||||
| 	logger := log.WithField("logger", "authentik.router.unicorn") | 	logger := log.WithField("logger", "authentik.router.unicorn") | ||||||
| 	g := &GoUnicorn{ | 	g := &GoUnicorn{ | ||||||
| 		Healthcheck:      healthcheck, | 		Healthcheck:     healthcheck, | ||||||
| 		log:              logger, | 		log:             logger, | ||||||
| 		started:          false, | 		started:         false, | ||||||
| 		killed:           false, | 		killed:          false, | ||||||
| 		alive:            false, | 		alive:           false, | ||||||
| 		healthyCallbacks: []func(){}, | 		HealthyCallback: func() {}, | ||||||
| 	} | 	} | ||||||
| 	g.initCmd() | 	g.initCmd() | ||||||
| 	c := make(chan os.Signal, 1) | 	c := make(chan os.Signal, 1) | ||||||
| @ -79,10 +79,6 @@ func (g *GoUnicorn) initCmd() { | |||||||
| 	g.p.Stderr = os.Stderr | 	g.p.Stderr = os.Stderr | ||||||
| } | } | ||||||
|  |  | ||||||
| func (g *GoUnicorn) AddHealthyCallback(cb func()) { |  | ||||||
| 	g.healthyCallbacks = append(g.healthyCallbacks, cb) |  | ||||||
| } |  | ||||||
|  |  | ||||||
| func (g *GoUnicorn) IsRunning() bool { | func (g *GoUnicorn) IsRunning() bool { | ||||||
| 	return g.alive | 	return g.alive | ||||||
| } | } | ||||||
| @ -105,9 +101,7 @@ func (g *GoUnicorn) healthcheck() { | |||||||
| 		if g.Healthcheck() { | 		if g.Healthcheck() { | ||||||
| 			g.alive = true | 			g.alive = true | ||||||
| 			g.log.Debug("backend is alive, backing off with healthchecks") | 			g.log.Debug("backend is alive, backing off with healthchecks") | ||||||
| 			for _, cb := range g.healthyCallbacks { | 			g.HealthyCallback() | ||||||
| 				cb() |  | ||||||
| 			} |  | ||||||
| 			break | 			break | ||||||
| 		} | 		} | ||||||
| 		g.log.Debug("backend not alive yet") | 		g.log.Debug("backend not alive yet") | ||||||
|  | |||||||
| @ -62,7 +62,7 @@ func NewAPIController(akURL url.URL, token string) *APIController { | |||||||
| 	apiConfig.Scheme = akURL.Scheme | 	apiConfig.Scheme = akURL.Scheme | ||||||
| 	apiConfig.HTTPClient = &http.Client{ | 	apiConfig.HTTPClient = &http.Client{ | ||||||
| 		Transport: web.NewUserAgentTransport( | 		Transport: web.NewUserAgentTransport( | ||||||
| 			constants.UserAgentOutpost(), | 			constants.OutpostUserAgent(), | ||||||
| 			web.NewTracingTransport( | 			web.NewTracingTransport( | ||||||
| 				rsp.Context(), | 				rsp.Context(), | ||||||
| 				GetTLSTransport(), | 				GetTLSTransport(), | ||||||
|  | |||||||
| @ -38,7 +38,7 @@ func (ac *APIController) initWS(akURL url.URL, outpostUUID string) error { | |||||||
|  |  | ||||||
| 	header := http.Header{ | 	header := http.Header{ | ||||||
| 		"Authorization": []string{authHeader}, | 		"Authorization": []string{authHeader}, | ||||||
| 		"User-Agent":    []string{constants.UserAgentOutpost()}, | 		"User-Agent":    []string{constants.OutpostUserAgent()}, | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	dialer := websocket.Dialer{ | 	dialer := websocket.Dialer{ | ||||||
|  | |||||||
| @ -3,8 +3,6 @@ package ak | |||||||
| import ( | import ( | ||||||
| 	"context" | 	"context" | ||||||
| 	"crypto/tls" | 	"crypto/tls" | ||||||
| 	"crypto/x509" |  | ||||||
| 	"encoding/pem" |  | ||||||
|  |  | ||||||
| 	log "github.com/sirupsen/logrus" | 	log "github.com/sirupsen/logrus" | ||||||
| 	"goauthentik.io/api/v3" | 	"goauthentik.io/api/v3" | ||||||
| @ -69,34 +67,16 @@ func (cs *CryptoStore) Fetch(uuid string) error { | |||||||
| 		return err | 		return err | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	var tcert tls.Certificate | 	x509cert, err := tls.X509KeyPair([]byte(cert.Data), []byte(key.Data)) | ||||||
| 	if key.Data != "" { | 	if err != nil { | ||||||
| 		x509cert, err := tls.X509KeyPair([]byte(cert.Data), []byte(key.Data)) | 		return err | ||||||
| 		if err != nil { |  | ||||||
| 			return err |  | ||||||
| 		} |  | ||||||
| 		tcert = x509cert |  | ||||||
| 	} else { |  | ||||||
| 		p, _ := pem.Decode([]byte(cert.Data)) |  | ||||||
| 		x509cert, err := x509.ParseCertificate(p.Bytes) |  | ||||||
| 		if err != nil { |  | ||||||
| 			return err |  | ||||||
| 		} |  | ||||||
| 		tcert = tls.Certificate{ |  | ||||||
| 			Certificate: [][]byte{x509cert.Raw}, |  | ||||||
| 			Leaf:        x509cert, |  | ||||||
| 		} |  | ||||||
| 	} | 	} | ||||||
| 	cs.certificates[uuid] = &tcert | 	cs.certificates[uuid] = &x509cert | ||||||
| 	cs.fingerprints[uuid] = cfp | 	cs.fingerprints[uuid] = cfp | ||||||
| 	return nil | 	return nil | ||||||
| } | } | ||||||
|  |  | ||||||
| func (cs *CryptoStore) Get(uuid string) *tls.Certificate { | func (cs *CryptoStore) Get(uuid string) *tls.Certificate { | ||||||
| 	c, ok := cs.certificates[uuid] |  | ||||||
| 	if ok { |  | ||||||
| 		return c |  | ||||||
| 	} |  | ||||||
| 	err := cs.Fetch(uuid) | 	err := cs.Fetch(uuid) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		cs.log.WithError(err).Warning("failed to fetch certificate") | 		cs.log.WithError(err).Warning("failed to fetch certificate") | ||||||
|  | |||||||
Some files were not shown because too many files have changed in this diff Show More
		Reference in New Issue
	
	Block a user
	