Compare commits

3 Commits

endpoints...esbuild-ts

| Author | SHA1 | Date |
|---|---|---|
| | 7c69add264 | |
| | 248fcd5d7f | |
| | 2c64e3f9ba | |

.github/actions/setup/action.yml (2 changes, vendored)

							| @ -36,7 +36,7 @@ runs: | ||||
|       with: | ||||
|         go-version-file: "go.mod" | ||||
|     - name: Setup docker cache | ||||
|       uses: AndreKurait/docker-cache@0fe76702a40db986d9663c24954fc14c6a6031b7 | ||||
|       uses: ScribeMD/docker-cache@0.5.0 | ||||
|       with: | ||||
|         key: docker-images-${{ runner.os }}-${{ hashFiles('.github/actions/setup/docker-compose.yml', 'Makefile') }}-${{ inputs.postgresql_version }} | ||||
|     - name: Setup dependencies | ||||
|  | ||||
							
								
								
									
.github/dependabot.yml (14 changes, vendored)

							| @ -23,13 +23,7 @@ updates: | ||||
|   - package-ecosystem: npm | ||||
|     directories: | ||||
|       - "/web" | ||||
|       - "/web/packages/sfe" | ||||
|       - "/web/packages/core" | ||||
|       - "/web/packages/esbuild-plugin-live-reload" | ||||
|       - "/packages/prettier-config" | ||||
|       - "/packages/tsconfig" | ||||
|       - "/packages/docusaurus-config" | ||||
|       - "/packages/eslint-config" | ||||
|       - "/web/sfe" | ||||
|     schedule: | ||||
|       interval: daily | ||||
|       time: "04:00" | ||||
| @ -74,9 +68,6 @@ updates: | ||||
|       wdio: | ||||
|         patterns: | ||||
|           - "@wdio/*" | ||||
|       goauthentik: | ||||
|         patterns: | ||||
|           - "@goauthentik/*" | ||||
|   - package-ecosystem: npm | ||||
|     directory: "/website" | ||||
|     schedule: | ||||
| @ -97,9 +88,6 @@ updates: | ||||
|           - "swc-*" | ||||
|           - "lightningcss*" | ||||
|           - "@rspack/binding*" | ||||
|       goauthentik: | ||||
|         patterns: | ||||
|           - "@goauthentik/*" | ||||
|   - package-ecosystem: npm | ||||
|     directory: "/lifecycle/aws" | ||||
|     schedule: | ||||
|  | ||||
							
								
								
									
.github/workflows/api-ts-publish.yml (1 change, vendored)

							| @ -53,7 +53,6 @@ jobs: | ||||
|           signoff: true | ||||
|           # ID from https://api.github.com/users/authentik-automation[bot] | ||||
|           author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com> | ||||
|           labels: dependencies | ||||
|       - uses: peter-evans/enable-pull-request-automerge@v3 | ||||
|         with: | ||||
|           token: ${{ steps.generate_token.outputs.token }} | ||||
|  | ||||
							
								
								
									
.github/workflows/ci-main.yml (2 changes, vendored)

							| @ -62,7 +62,6 @@ jobs: | ||||
|         psql: | ||||
|           - 15-alpine | ||||
|           - 16-alpine | ||||
|           - 17-alpine | ||||
|         run_id: [1, 2, 3, 4, 5] | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
| @ -117,7 +116,6 @@ jobs: | ||||
|         psql: | ||||
|           - 15-alpine | ||||
|           - 16-alpine | ||||
|           - 17-alpine | ||||
|         run_id: [1, 2, 3, 4, 5] | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|  | ||||
| @ -37,7 +37,6 @@ jobs: | ||||
|           signoff: true | ||||
|           # ID from https://api.github.com/users/authentik-automation[bot] | ||||
|           author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com> | ||||
|           labels: dependencies | ||||
|       - uses: peter-evans/enable-pull-request-automerge@v3 | ||||
|         with: | ||||
|           token: ${{ steps.generate_token.outputs.token }} | ||||
|  | ||||
							
								
								
									
.github/workflows/image-compress.yml (1 change, vendored)

							| @ -53,7 +53,6 @@ jobs: | ||||
|           body: ${{ steps.compress.outputs.markdown }} | ||||
|           delete-branch: true | ||||
|           signoff: true | ||||
|           labels: dependencies | ||||
|       - uses: peter-evans/enable-pull-request-automerge@v3 | ||||
|         if: "${{ github.event_name != 'pull_request' && steps.compress.outputs.markdown != '' }}" | ||||
|         with: | ||||
|  | ||||
							
								
								
									
.github/workflows/packages-npm-publish.yml (16 changes, vendored)

							| @ -7,7 +7,6 @@ on: | ||||
|       - packages/eslint-config/** | ||||
|       - packages/prettier-config/** | ||||
|       - packages/tsconfig/** | ||||
|       - web/packages/esbuild-plugin-live-reload/** | ||||
|   workflow_dispatch: | ||||
| jobs: | ||||
|   publish: | ||||
| @ -17,28 +16,27 @@ jobs: | ||||
|       fail-fast: false | ||||
|       matrix: | ||||
|         package: | ||||
|           - packages/docusaurus-config | ||||
|           - packages/eslint-config | ||||
|           - packages/prettier-config | ||||
|           - packages/tsconfig | ||||
|           - web/packages/esbuild-plugin-live-reload | ||||
|           - docusaurus-config | ||||
|           - eslint-config | ||||
|           - prettier-config | ||||
|           - tsconfig | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|         with: | ||||
|           fetch-depth: 2 | ||||
|       - uses: actions/setup-node@v4 | ||||
|         with: | ||||
|           node-version-file: ${{ matrix.package }}/package.json | ||||
|           node-version-file: packages/${{ matrix.package }}/package.json | ||||
|           registry-url: "https://registry.npmjs.org" | ||||
|       - name: Get changed files | ||||
|         id: changed-files | ||||
|         uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c | ||||
|         with: | ||||
|           files: | | ||||
|             ${{ matrix.package }}/package.json | ||||
|             packages/${{ matrix.package }}/package.json | ||||
|       - name: Publish package | ||||
|         if: steps.changed-files.outputs.any_changed == 'true' | ||||
|         working-directory: ${{ matrix.package }} | ||||
|         working-directory: packages/${{ matrix.package}} | ||||
|         run: | | ||||
|           npm ci | ||||
|           npm run build | ||||
|  | ||||
| @ -52,6 +52,3 @@ jobs: | ||||
|           body: "core, web: update translations" | ||||
|           delete-branch: true | ||||
|           signoff: true | ||||
|           labels: dependencies | ||||
|           # ID from https://api.github.com/users/authentik-automation[bot] | ||||
|           author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com> | ||||
|  | ||||
							
								
								
									
.github/workflows/translation-rename.yml (15 changes, vendored)

							| @ -15,7 +15,6 @@ jobs: | ||||
|     runs-on: ubuntu-latest | ||||
|     if: ${{ github.event.pull_request.user.login == 'transifex-integration[bot]'}} | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - id: generate_token | ||||
|         uses: tibdex/github-app-token@v2 | ||||
|         with: | ||||
| @ -26,13 +25,23 @@ jobs: | ||||
|         env: | ||||
|           GH_TOKEN: ${{ steps.generate_token.outputs.token }} | ||||
|         run: | | ||||
|           title=$(gh pr view ${{ github.event.pull_request.number }} --json  "title" -q ".title") | ||||
|           title=$(curl -q -L \ | ||||
|             -H "Accept: application/vnd.github+json" \ | ||||
|             -H "Authorization: Bearer ${GH_TOKEN}" \ | ||||
|             -H "X-GitHub-Api-Version: 2022-11-28" \ | ||||
|             https://api.github.com/repos/${GITHUB_REPOSITORY}/pulls/${{ github.event.pull_request.number }} | jq -r .title) | ||||
|           echo "title=${title}" >> "$GITHUB_OUTPUT" | ||||
|       - name: Rename | ||||
|         env: | ||||
|           GH_TOKEN: ${{ steps.generate_token.outputs.token }} | ||||
|         run: | | ||||
|           gh pr edit ${{ github.event.pull_request.number }} -t "translate: ${{ steps.title.outputs.title }}" --add-label dependencies | ||||
|           curl -L \ | ||||
|             -X PATCH \ | ||||
|             -H "Accept: application/vnd.github+json" \ | ||||
|             -H "Authorization: Bearer ${GH_TOKEN}" \ | ||||
|             -H "X-GitHub-Api-Version: 2022-11-28" \ | ||||
|             https://api.github.com/repos/${GITHUB_REPOSITORY}/pulls/${{ github.event.pull_request.number }} \ | ||||
|             -d "{\"title\":\"translate: ${{ steps.title.outputs.title }}\"}" | ||||
|       - uses: peter-evans/enable-pull-request-automerge@v3 | ||||
|         with: | ||||
|           token: ${{ steps.generate_token.outputs.token }} | ||||
|  | ||||
| @ -1,7 +1,7 @@ | ||||
| # syntax=docker/dockerfile:1 | ||||
|  | ||||
| # Stage 1: Build website | ||||
| FROM --platform=${BUILDPLATFORM} docker.io/library/node:24 AS website-builder | ||||
| FROM --platform=${BUILDPLATFORM} docker.io/library/node:22 AS website-builder | ||||
|  | ||||
| ENV NODE_ENV=production | ||||
|  | ||||
| @ -20,7 +20,7 @@ COPY ./SECURITY.md /work/ | ||||
| RUN npm run build-bundled | ||||
|  | ||||
| # Stage 2: Build webui | ||||
| FROM --platform=${BUILDPLATFORM} docker.io/library/node:24 AS web-builder | ||||
| FROM --platform=${BUILDPLATFORM} docker.io/library/node:22 AS web-builder | ||||
|  | ||||
| ARG GIT_BUILD_HASH | ||||
| ENV GIT_BUILD_HASH=$GIT_BUILD_HASH | ||||
| @ -94,7 +94,7 @@ RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \ | ||||
|     /bin/sh -c "GEOIPUPDATE_LICENSE_KEY_FILE=/run/secrets/GEOIPUPDATE_LICENSE_KEY /usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0" | ||||
|  | ||||
| # Stage 5: Download uv | ||||
| FROM ghcr.io/astral-sh/uv:0.7.8 AS uv | ||||
| FROM ghcr.io/astral-sh/uv:0.7.4 AS uv | ||||
| # Stage 6: Base python image | ||||
| FROM ghcr.io/goauthentik/fips-python:3.13.3-slim-bookworm-fips AS python-base | ||||
|  | ||||
|  | ||||
							
								
								
									
Makefile (51 changes)

							| @ -1,7 +1,6 @@ | ||||
| .PHONY: gen dev-reset all clean test web website | ||||
|  | ||||
| SHELL := /usr/bin/env bash | ||||
| .SHELLFLAGS += ${SHELLFLAGS} -e -o pipefail | ||||
| .SHELLFLAGS += ${SHELLFLAGS} -e | ||||
| PWD = $(shell pwd) | ||||
| UID = $(shell id -u) | ||||
| GID = $(shell id -g) | ||||
| @ -9,9 +8,9 @@ NPM_VERSION = $(shell python -m scripts.generate_semver) | ||||
| PY_SOURCES = authentik tests scripts lifecycle .github | ||||
| DOCKER_IMAGE ?= "authentik:test" | ||||
|  | ||||
| GEN_API_TS = gen-ts-api | ||||
| GEN_API_PY = gen-py-api | ||||
| GEN_API_GO = gen-go-api | ||||
| GEN_API_TS = "gen-ts-api" | ||||
| GEN_API_PY = "gen-py-api" | ||||
| GEN_API_GO = "gen-go-api" | ||||
|  | ||||
| pg_user := $(shell uv run python -m authentik.lib.config postgresql.user 2>/dev/null) | ||||
| pg_host := $(shell uv run python -m authentik.lib.config postgresql.host 2>/dev/null) | ||||
| @ -118,19 +117,14 @@ gen-diff:  ## (Release) generate the changelog diff between the current schema a | ||||
| 	npx prettier --write diff.md | ||||
|  | ||||
| gen-clean-ts:  ## Remove generated API client for Typescript | ||||
| 	rm -rf ${PWD}/${GEN_API_TS}/ | ||||
| 	rm -rf ${PWD}/web/node_modules/@goauthentik/api/ | ||||
| 	rm -rf ./${GEN_API_TS}/ | ||||
| 	rm -rf ./web/node_modules/@goauthentik/api/ | ||||
|  | ||||
| gen-clean-go:  ## Remove generated API client for Go | ||||
| 	mkdir -p ${PWD}/${GEN_API_GO} | ||||
| ifneq ($(wildcard ${PWD}/${GEN_API_GO}/.*),) | ||||
| 	make -C ${PWD}/${GEN_API_GO} clean | ||||
| else | ||||
| 	rm -rf ${PWD}/${GEN_API_GO} | ||||
| endif | ||||
| 	rm -rf ./${GEN_API_GO}/ | ||||
|  | ||||
| gen-clean-py:  ## Remove generated API client for Python | ||||
| 	rm -rf ${PWD}/${GEN_API_PY}/ | ||||
| 	rm -rf ./${GEN_API_PY}/ | ||||
|  | ||||
| gen-clean: gen-clean-ts gen-clean-go gen-clean-py  ## Remove generated API clients | ||||
|  | ||||
| @ -147,8 +141,8 @@ gen-client-ts: gen-clean-ts  ## Build and install the authentik API for Typescri | ||||
| 		--git-repo-id authentik \ | ||||
| 		--git-user-id goauthentik | ||||
| 	mkdir -p web/node_modules/@goauthentik/api | ||||
| 	cd ${PWD}/${GEN_API_TS} && npm i | ||||
| 	\cp -rf ${PWD}/${GEN_API_TS}/* web/node_modules/@goauthentik/api | ||||
| 	cd ./${GEN_API_TS} && npm i | ||||
| 	\cp -rf ./${GEN_API_TS}/* web/node_modules/@goauthentik/api | ||||
|  | ||||
| gen-client-py: gen-clean-py ## Build and install the authentik API for Python | ||||
| 	docker run \ | ||||
| @ -162,17 +156,24 @@ gen-client-py: gen-clean-py ## Build and install the authentik API for Python | ||||
| 		--additional-properties=packageVersion=${NPM_VERSION} \ | ||||
| 		--git-repo-id authentik \ | ||||
| 		--git-user-id goauthentik | ||||
| 	pip install ./${GEN_API_PY} | ||||
|  | ||||
| gen-client-go: gen-clean-go  ## Build and install the authentik API for Golang | ||||
| 	mkdir -p ${PWD}/${GEN_API_GO} | ||||
| ifeq ($(wildcard ${PWD}/${GEN_API_GO}/.*),) | ||||
| 	git clone --depth 1 https://github.com/goauthentik/client-go.git ${PWD}/${GEN_API_GO} | ||||
| else | ||||
| 	cd ${PWD}/${GEN_API_GO} && git pull | ||||
| endif | ||||
| 	cp ${PWD}/schema.yml ${PWD}/${GEN_API_GO} | ||||
| 	make -C ${PWD}/${GEN_API_GO} build | ||||
| 	mkdir -p ./${GEN_API_GO} ./${GEN_API_GO}/templates | ||||
| 	wget https://raw.githubusercontent.com/goauthentik/client-go/main/config.yaml -O ./${GEN_API_GO}/config.yaml | ||||
| 	wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/README.mustache -O ./${GEN_API_GO}/templates/README.mustache | ||||
| 	wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/go.mod.mustache -O ./${GEN_API_GO}/templates/go.mod.mustache | ||||
| 	cp schema.yml ./${GEN_API_GO}/ | ||||
| 	docker run \ | ||||
| 		--rm -v ${PWD}/${GEN_API_GO}:/local \ | ||||
| 		--user ${UID}:${GID} \ | ||||
| 		docker.io/openapitools/openapi-generator-cli:v6.5.0 generate \ | ||||
| 		-i /local/schema.yml \ | ||||
| 		-g go \ | ||||
| 		-o /local/ \ | ||||
| 		-c /local/config.yaml | ||||
| 	go mod edit -replace goauthentik.io/api/v3=./${GEN_API_GO} | ||||
| 	rm -rf ./${GEN_API_GO}/config.yaml ./${GEN_API_GO}/templates/ | ||||
|  | ||||
| gen-dev-config:  ## Generate a local development config file | ||||
| 	uv run scripts/generate_config.py | ||||
| @ -243,7 +244,7 @@ docker:  ## Build a docker image of the current source tree | ||||
| 	DOCKER_BUILDKIT=1 docker build . --progress plain --tag ${DOCKER_IMAGE} | ||||
|  | ||||
| test-docker: | ||||
| 	BUILD=true ${PWD}/scripts/test_docker.sh | ||||
| 	BUILD=true ./scripts/test_docker.sh | ||||
|  | ||||
| ######################### | ||||
| ## CI | ||||
|  | ||||
| @ -1,12 +1,9 @@ | ||||
| """API Authentication""" | ||||
|  | ||||
| from hmac import compare_digest | ||||
| from pathlib import Path | ||||
| from tempfile import gettempdir | ||||
| from typing import Any | ||||
|  | ||||
| from django.conf import settings | ||||
| from django.contrib.auth.models import AnonymousUser | ||||
| from drf_spectacular.extensions import OpenApiAuthenticationExtension | ||||
| from rest_framework.authentication import BaseAuthentication, get_authorization_header | ||||
| from rest_framework.exceptions import AuthenticationFailed | ||||
| @ -14,17 +11,11 @@ from rest_framework.request import Request | ||||
| from structlog.stdlib import get_logger | ||||
|  | ||||
| from authentik.core.middleware import CTX_AUTH_VIA | ||||
| from authentik.core.models import Token, TokenIntents, User, UserTypes | ||||
| from authentik.core.models import Token, TokenIntents, User | ||||
| from authentik.outposts.models import Outpost | ||||
| from authentik.providers.oauth2.constants import SCOPE_AUTHENTIK_API | ||||
|  | ||||
| LOGGER = get_logger() | ||||
| _tmp = Path(gettempdir()) | ||||
| try: | ||||
|     with open(_tmp / "authentik-core-ipc.key") as _f: | ||||
|         ipc_key = _f.read() | ||||
| except OSError: | ||||
|     ipc_key = None | ||||
|  | ||||
|  | ||||
| def validate_auth(header: bytes) -> str | None: | ||||
| @ -82,11 +73,6 @@ def auth_user_lookup(raw_header: bytes) -> User | None: | ||||
|     if user: | ||||
|         CTX_AUTH_VIA.set("secret_key") | ||||
|         return user | ||||
|     # then try to auth via secret key (for embedded outpost/etc) | ||||
|     user = token_ipc(auth_credentials) | ||||
|     if user: | ||||
|         CTX_AUTH_VIA.set("ipc") | ||||
|         return user | ||||
|     raise AuthenticationFailed("Token invalid/expired") | ||||
|  | ||||
|  | ||||
| @ -104,43 +90,6 @@ def token_secret_key(value: str) -> User | None: | ||||
|     return outpost.user | ||||
|  | ||||
|  | ||||
| class IPCUser(AnonymousUser): | ||||
|     """'Virtual' user for IPC communication between authentik core and the authentik router""" | ||||
|  | ||||
|     username = "authentik:system" | ||||
|     is_active = True | ||||
|     is_superuser = True | ||||
|  | ||||
|     @property | ||||
|     def type(self): | ||||
|         return UserTypes.INTERNAL_SERVICE_ACCOUNT | ||||
|  | ||||
|     def has_perm(self, perm, obj=None): | ||||
|         return True | ||||
|  | ||||
|     def has_perms(self, perm_list, obj=None): | ||||
|         return True | ||||
|  | ||||
|     def has_module_perms(self, module): | ||||
|         return True | ||||
|  | ||||
|     @property | ||||
|     def is_anonymous(self): | ||||
|         return False | ||||
|  | ||||
|     @property | ||||
|     def is_authenticated(self): | ||||
|         return True | ||||
|  | ||||
|  | ||||
| def token_ipc(value: str) -> User | None: | ||||
|     """Check if the token is the secret key | ||||
|     and return the service account for the managed outpost""" | ||||
|     if not ipc_key or not compare_digest(value, ipc_key): | ||||
|         return None | ||||
|     return IPCUser() | ||||
|  | ||||
|  | ||||
| class TokenAuthentication(BaseAuthentication): | ||||
|     """Token-based authentication using HTTP Bearer authentication""" | ||||
|  | ||||
|  | ||||
| @ -59,7 +59,6 @@ class BrandSerializer(ModelSerializer): | ||||
|             "flow_device_code", | ||||
|             "default_application", | ||||
|             "web_certificate", | ||||
|             "client_certificates", | ||||
|             "attributes", | ||||
|         ] | ||||
|         extra_kwargs = { | ||||
| @ -121,7 +120,6 @@ class BrandViewSet(UsedByMixin, ModelViewSet): | ||||
|         "domain", | ||||
|         "branding_title", | ||||
|         "web_certificate__name", | ||||
|         "client_certificates__name", | ||||
|     ] | ||||
|     filterset_fields = [ | ||||
|         "brand_uuid", | ||||
| @ -138,7 +136,6 @@ class BrandViewSet(UsedByMixin, ModelViewSet): | ||||
|         "flow_user_settings", | ||||
|         "flow_device_code", | ||||
|         "web_certificate", | ||||
|         "client_certificates", | ||||
|     ] | ||||
|     ordering = ["domain"] | ||||
|  | ||||
|  | ||||
| @ -1,37 +0,0 @@ | ||||
| # Generated by Django 5.1.9 on 2025-05-19 15:09 | ||||
|  | ||||
| import django.db.models.deletion | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("authentik_brands", "0009_brand_branding_default_flow_background"), | ||||
|         ("authentik_crypto", "0004_alter_certificatekeypair_name"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name="brand", | ||||
|             name="client_certificates", | ||||
|             field=models.ManyToManyField( | ||||
|                 blank=True, | ||||
|                 default=None, | ||||
|                 help_text="Certificates used for client authentication.", | ||||
|                 to="authentik_crypto.certificatekeypair", | ||||
|             ), | ||||
|         ), | ||||
|         migrations.AlterField( | ||||
|             model_name="brand", | ||||
|             name="web_certificate", | ||||
|             field=models.ForeignKey( | ||||
|                 default=None, | ||||
|                 help_text="Web Certificate used by the authentik Core webserver.", | ||||
|                 null=True, | ||||
|                 on_delete=django.db.models.deletion.SET_DEFAULT, | ||||
|                 related_name="+", | ||||
|                 to="authentik_crypto.certificatekeypair", | ||||
|             ), | ||||
|         ), | ||||
|     ] | ||||
| @ -73,13 +73,6 @@ class Brand(SerializerModel): | ||||
|         default=None, | ||||
|         on_delete=models.SET_DEFAULT, | ||||
|         help_text=_("Web Certificate used by the authentik Core webserver."), | ||||
|         related_name="+", | ||||
|     ) | ||||
|     client_certificates = models.ManyToManyField( | ||||
|         CertificateKeyPair, | ||||
|         default=None, | ||||
|         blank=True, | ||||
|         help_text=_("Certificates used for client authentication."), | ||||
|     ) | ||||
|     attributes = models.JSONField(default=dict, blank=True) | ||||
|  | ||||
|  | ||||
| @ -84,7 +84,6 @@ from authentik.flows.views.executor import QS_KEY_TOKEN | ||||
| from authentik.lib.avatars import get_avatar | ||||
| from authentik.rbac.decorators import permission_required | ||||
| from authentik.rbac.models import get_permission_choices | ||||
| from authentik.stages.email.flow import pickle_flow_token_for_email | ||||
| from authentik.stages.email.models import EmailStage | ||||
| from authentik.stages.email.tasks import send_mails | ||||
| from authentik.stages.email.utils import TemplateEmailMessage | ||||
| @ -452,7 +451,7 @@ class UserViewSet(UsedByMixin, ModelViewSet): | ||||
|     def list(self, request, *args, **kwargs): | ||||
|         return super().list(request, *args, **kwargs) | ||||
|  | ||||
|     def _create_recovery_link(self, for_email=False) -> tuple[str, Token]: | ||||
|     def _create_recovery_link(self) -> tuple[str, Token]: | ||||
|         """Create a recovery link (when the current brand has a recovery flow set), | ||||
|         that can either be shown to an admin or sent to the user directly""" | ||||
|         brand: Brand = self.request._request.brand | ||||
| @ -474,16 +473,12 @@ class UserViewSet(UsedByMixin, ModelViewSet): | ||||
|             raise ValidationError( | ||||
|                 {"non_field_errors": "Recovery flow not applicable to user"} | ||||
|             ) from None | ||||
|         _plan = FlowToken.pickle(plan) | ||||
|         if for_email: | ||||
|             _plan = pickle_flow_token_for_email(plan) | ||||
|         token, __ = FlowToken.objects.update_or_create( | ||||
|             identifier=f"{user.uid}-password-reset", | ||||
|             defaults={ | ||||
|                 "user": user, | ||||
|                 "flow": flow, | ||||
|                 "_plan": _plan, | ||||
|                 "revoke_on_execution": not for_email, | ||||
|                 "_plan": FlowToken.pickle(plan), | ||||
|             }, | ||||
|         ) | ||||
|         querystring = urlencode({QS_KEY_TOKEN: token.key}) | ||||
| @ -653,7 +648,7 @@ class UserViewSet(UsedByMixin, ModelViewSet): | ||||
|         if for_user.email == "": | ||||
|             LOGGER.debug("User doesn't have an email address") | ||||
|             raise ValidationError({"non_field_errors": "User does not have an email address set."}) | ||||
|         link, token = self._create_recovery_link(for_email=True) | ||||
|         link, token = self._create_recovery_link() | ||||
|         # Lookup the email stage to assure the current user can access it | ||||
|         stages = get_objects_for_user( | ||||
|             request.user, "authentik_stages_email.view_emailstage" | ||||
|  | ||||
| @ -30,7 +30,6 @@ from structlog.stdlib import get_logger | ||||
|  | ||||
| from authentik.core.api.used_by import UsedByMixin | ||||
| from authentik.core.api.utils import ModelSerializer, PassiveSerializer | ||||
| from authentik.core.models import UserTypes | ||||
| from authentik.crypto.apps import MANAGED_KEY | ||||
| from authentik.crypto.builder import CertificateBuilder, PrivateKeyAlg | ||||
| from authentik.crypto.models import CertificateKeyPair | ||||
| @ -273,12 +272,11 @@ class CertificateKeyPairViewSet(UsedByMixin, ModelViewSet): | ||||
|     def view_certificate(self, request: Request, pk: str) -> Response: | ||||
|         """Return certificate-key pairs certificate and log access""" | ||||
|         certificate: CertificateKeyPair = self.get_object() | ||||
|         if request.user.type != UserTypes.INTERNAL_SERVICE_ACCOUNT: | ||||
|             Event.new(  # noqa # nosec | ||||
|                 EventAction.SECRET_VIEW, | ||||
|                 secret=certificate, | ||||
|                 type="certificate", | ||||
|             ).from_http(request) | ||||
|         Event.new(  # noqa # nosec | ||||
|             EventAction.SECRET_VIEW, | ||||
|             secret=certificate, | ||||
|             type="certificate", | ||||
|         ).from_http(request) | ||||
|         if "download" in request.query_params: | ||||
|             # Mime type from https://pki-tutorial.readthedocs.io/en/latest/mime.html | ||||
|             response = HttpResponse( | ||||
| @ -304,12 +302,11 @@ class CertificateKeyPairViewSet(UsedByMixin, ModelViewSet): | ||||
|     def view_private_key(self, request: Request, pk: str) -> Response: | ||||
|         """Return certificate-key pairs private key and log access""" | ||||
|         certificate: CertificateKeyPair = self.get_object() | ||||
|         if request.user.type != UserTypes.INTERNAL_SERVICE_ACCOUNT: | ||||
|             Event.new(  # noqa # nosec | ||||
|                 EventAction.SECRET_VIEW, | ||||
|                 secret=certificate, | ||||
|                 type="private_key", | ||||
|             ).from_http(request) | ||||
|         Event.new(  # noqa # nosec | ||||
|             EventAction.SECRET_VIEW, | ||||
|             secret=certificate, | ||||
|             type="private_key", | ||||
|         ).from_http(request) | ||||
|         if "download" in request.query_params: | ||||
|             # Mime type from https://pki-tutorial.readthedocs.io/en/latest/mime.html | ||||
|             response = HttpResponse(certificate.key_data, content_type="application/x-pem-file") | ||||
|  | ||||
| @ -1,12 +0,0 @@ | ||||
| """authentik endpoints app config""" | ||||
|  | ||||
| from authentik.blueprints.apps import ManagedAppConfig | ||||
|  | ||||
|  | ||||
| class AuthentikEndpointsConfig(ManagedAppConfig): | ||||
|     """authentik endpoints app config""" | ||||
|  | ||||
|     name = "authentik.endpoints" | ||||
|     label = "authentik_endpoints" | ||||
|     verbose_name = "authentik Endpoints" | ||||
|     default = True | ||||
| @ -1,47 +0,0 @@ | ||||
| from enum import Enum | ||||
|  | ||||
| from pydantic import BaseModel | ||||
|  | ||||
|  | ||||
| class UNSUPPORTED(BaseModel): | ||||
|     pass | ||||
|  | ||||
|  | ||||
| class OSFamily(Enum): | ||||
|  | ||||
|     linux = "linux" | ||||
|     unix = "unix" | ||||
|     bsd = "bsd" | ||||
|     windows = "windows" | ||||
|     macOS = "mac_os" | ||||
|     android = "android" | ||||
|     iOS = "i_os" | ||||
|     other = "other" | ||||
|  | ||||
| class CommonDeviceData(BaseModel): | ||||
|     class Disk(BaseModel): | ||||
|         encryption: bool | ||||
|  | ||||
|     class OS(BaseModel): | ||||
|         firewall_enabled: bool | ||||
|         family: OSFamily | ||||
|         name: str | ||||
|         version: str | ||||
|  | ||||
|     class Network(BaseModel): | ||||
|         hostname: str | ||||
|         dns_servers: list[str] | ||||
|  | ||||
|     class Hardware(BaseModel): | ||||
|         model: str | ||||
|         manufacturer: str | ||||
|  | ||||
|     class Software(BaseModel): | ||||
|         name: str | ||||
|         version: str | ||||
|  | ||||
|     os: OS | UNSUPPORTED | ||||
|     disks: list[Disk] | UNSUPPORTED | ||||
|     network: Network | UNSUPPORTED | ||||
|     hardware: Hardware | UNSUPPORTED | ||||
|     software: list[Software] | UNSUPPORTED | ||||
| @ -1,16 +0,0 @@ | ||||
| from authentik.blueprints import models | ||||
|  | ||||
|  | ||||
| class EnrollmentMethods(models.TextChoices): | ||||
|     AUTOMATIC_USER = "automatic_user"  # Automatically enrolled through user action | ||||
|     AUTOMATIC_API = "automatic_api"  # Automatically enrolled through connector integration | ||||
|     MANUAL_USER = "manual_user"  # Manually enrolled | ||||
|  | ||||
|  | ||||
| class BaseConnector: | ||||
|  | ||||
|     def __init__(self) -> None: | ||||
|         pass | ||||
|  | ||||
|     def supported_enrollment_methods(self) -> list[EnrollmentMethods]: | ||||
|         return [] | ||||
| @ -1,7 +0,0 @@ | ||||
| from authentik.endpoints.connector import BaseConnector, EnrollmentMethods | ||||
|  | ||||
|  | ||||
| class GoogleChromeConnector(BaseConnector): | ||||
|  | ||||
|     def supported_enrollment_methods(self) -> list[EnrollmentMethods]: | ||||
|         return [EnrollmentMethods.AUTOMATIC_USER] | ||||
| @ -1,7 +0,0 @@ | ||||
| from django.db import models | ||||
|  | ||||
| from authentik.endpoints.models import Connector | ||||
|  | ||||
|  | ||||
| class GoogleChromeConnector(Connector): | ||||
|     credentials = models.JSONField() | ||||
| @ -1,125 +0,0 @@ | ||||
| # Generated by Django 5.0.9 on 2024-09-24 19:16 | ||||
|  | ||||
| import django.db.models.deletion | ||||
| import uuid | ||||
| from django.conf import settings | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     initial = True | ||||
|  | ||||
|     dependencies = [ | ||||
|         migrations.swappable_dependency(settings.AUTH_USER_MODEL), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.CreateModel( | ||||
|             name="Connector", | ||||
|             fields=[ | ||||
|                 ( | ||||
|                     "id", | ||||
|                     models.AutoField( | ||||
|                         auto_created=True, primary_key=True, serialize=False, verbose_name="ID" | ||||
|                     ), | ||||
|                 ), | ||||
|                 ("connector_uuid", models.UUIDField(default=uuid.uuid4)), | ||||
|                 ("name", models.TextField()), | ||||
|                 ( | ||||
|                     "enrollment_method", | ||||
|                     models.TextField( | ||||
|                         choices=[ | ||||
|                             ("automatic_user", "Automatic User"), | ||||
|                             ("automatic_api", "Automatic Api"), | ||||
|                             ("manual_user", "Manual User"), | ||||
|                         ] | ||||
|                     ), | ||||
|                 ), | ||||
|             ], | ||||
|             options={ | ||||
|                 "abstract": False, | ||||
|             }, | ||||
|         ), | ||||
|         migrations.CreateModel( | ||||
|             name="Device", | ||||
|             fields=[ | ||||
|                 ( | ||||
|                     "id", | ||||
|                     models.AutoField( | ||||
|                         auto_created=True, primary_key=True, serialize=False, verbose_name="ID" | ||||
|                     ), | ||||
|                 ), | ||||
|                 ("device_uuid", models.UUIDField(default=uuid.uuid4)), | ||||
|                 ("identifier", models.TextField(unique=True)), | ||||
|             ], | ||||
|             options={ | ||||
|                 "abstract": False, | ||||
|             }, | ||||
|         ), | ||||
|         migrations.CreateModel( | ||||
|             name="DeviceConnection", | ||||
|             fields=[ | ||||
|                 ( | ||||
|                     "id", | ||||
|                     models.AutoField( | ||||
|                         auto_created=True, primary_key=True, serialize=False, verbose_name="ID" | ||||
|                     ), | ||||
|                 ), | ||||
|                 ("device_connection_uuid", models.UUIDField(default=uuid.uuid4)), | ||||
|                 ("data", models.JSONField(default=dict)), | ||||
|                 ( | ||||
|                     "connection", | ||||
|                     models.ForeignKey( | ||||
|                         on_delete=django.db.models.deletion.CASCADE, | ||||
|                         to="authentik_endpoints.connector", | ||||
|                     ), | ||||
|                 ), | ||||
|                 ( | ||||
|                     "device", | ||||
|                     models.ForeignKey( | ||||
|                         on_delete=django.db.models.deletion.CASCADE, to="authentik_endpoints.device" | ||||
|                     ), | ||||
|                 ), | ||||
|             ], | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name="device", | ||||
|             name="connections", | ||||
|             field=models.ManyToManyField( | ||||
|                 through="authentik_endpoints.DeviceConnection", to="authentik_endpoints.connector" | ||||
|             ), | ||||
|         ), | ||||
|         migrations.CreateModel( | ||||
|             name="DeviceUser", | ||||
|             fields=[ | ||||
|                 ( | ||||
|                     "id", | ||||
|                     models.AutoField( | ||||
|                         auto_created=True, primary_key=True, serialize=False, verbose_name="ID" | ||||
|                     ), | ||||
|                 ), | ||||
|                 ("device_user_uuid", models.UUIDField(default=uuid.uuid4)), | ||||
|                 ("is_primary", models.BooleanField()), | ||||
|                 ( | ||||
|                     "device", | ||||
|                     models.ForeignKey( | ||||
|                         on_delete=django.db.models.deletion.CASCADE, to="authentik_endpoints.device" | ||||
|                     ), | ||||
|                 ), | ||||
|                 ( | ||||
|                     "user", | ||||
|                     models.ForeignKey( | ||||
|                         on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL | ||||
|                     ), | ||||
|                 ), | ||||
|             ], | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name="device", | ||||
|             name="users", | ||||
|             field=models.ManyToManyField( | ||||
|                 through="authentik_endpoints.DeviceUser", to=settings.AUTH_USER_MODEL | ||||
|             ), | ||||
|         ), | ||||
|     ] | ||||
| @ -1,40 +0,0 @@ | ||||
| from uuid import uuid4 | ||||
|  | ||||
| from django.db import models | ||||
| from django.utils.functional import cached_property | ||||
|  | ||||
| from authentik.core.models import User | ||||
| from authentik.endpoints.common_data import CommonDeviceData | ||||
| from authentik.lib.models import SerializerModel | ||||
|  | ||||
|  | ||||
| class Device(SerializerModel): | ||||
|     device_uuid = models.UUIDField(default=uuid4) | ||||
|  | ||||
|     identifier = models.TextField(unique=True) | ||||
|     users = models.ManyToManyField(User, through="DeviceUser") | ||||
|     connections = models.ManyToManyField("Connector", through="DeviceConnection") | ||||
|  | ||||
|     @cached_property | ||||
|     def data(self) -> CommonDeviceData: | ||||
|         pass | ||||
|  | ||||
|  | ||||
| class DeviceUser(models.Model): | ||||
|     device_user_uuid = models.UUIDField(default=uuid4) | ||||
|     device = models.ForeignKey("Device", on_delete=models.CASCADE) | ||||
|     user = models.ForeignKey(User, on_delete=models.CASCADE) | ||||
|     is_primary = models.BooleanField() | ||||
|  | ||||
|  | ||||
| class DeviceConnection(models.Model): | ||||
|     device_connection_uuid = models.UUIDField(default=uuid4) | ||||
|     device = models.ForeignKey("Device", on_delete=models.CASCADE) | ||||
|     connection = models.ForeignKey("Connector", on_delete=models.CASCADE) | ||||
|     data = models.JSONField(default=dict) | ||||
|  | ||||
|  | ||||
| class Connector(SerializerModel): | ||||
|     connector_uuid = models.UUIDField(default=uuid4) | ||||
|  | ||||
|     name = models.TextField() | ||||
| @ -25,7 +25,7 @@ class GoogleWorkspaceGroupClient( | ||||
|     """Google client for groups""" | ||||
|  | ||||
|     connection_type = GoogleWorkspaceProviderGroup | ||||
|     connection_attr = "googleworkspaceprovidergroup_set" | ||||
|     connection_type_query = "group" | ||||
|     can_discover = True | ||||
|  | ||||
|     def __init__(self, provider: GoogleWorkspaceProvider) -> None: | ||||
|  | ||||
| @ -20,7 +20,7 @@ class GoogleWorkspaceUserClient(GoogleWorkspaceSyncClient[User, GoogleWorkspaceP | ||||
|     """Sync authentik users into google workspace""" | ||||
|  | ||||
|     connection_type = GoogleWorkspaceProviderUser | ||||
|     connection_attr = "googleworkspaceprovideruser_set" | ||||
|     connection_type_query = "user" | ||||
|     can_discover = True | ||||
|  | ||||
|     def __init__(self, provider: GoogleWorkspaceProvider) -> None: | ||||
|  | ||||
| @ -132,11 +132,7 @@ class GoogleWorkspaceProvider(OutgoingSyncProvider, BackchannelProvider): | ||||
|         if type == User: | ||||
|             # Get queryset of all users with consistent ordering | ||||
|             # according to the provider's settings | ||||
|             base = ( | ||||
|                 User.objects.prefetch_related("googleworkspaceprovideruser_set") | ||||
|                 .all() | ||||
|                 .exclude_anonymous() | ||||
|             ) | ||||
|             base = User.objects.all().exclude_anonymous() | ||||
|             if self.exclude_users_service_account: | ||||
|                 base = base.exclude(type=UserTypes.SERVICE_ACCOUNT).exclude( | ||||
|                     type=UserTypes.INTERNAL_SERVICE_ACCOUNT | ||||
| @ -146,11 +142,7 @@ class GoogleWorkspaceProvider(OutgoingSyncProvider, BackchannelProvider): | ||||
|             return base.order_by("pk") | ||||
|         if type == Group: | ||||
|             # Get queryset of all groups with consistent ordering | ||||
|             return ( | ||||
|                 Group.objects.prefetch_related("googleworkspaceprovidergroup_set") | ||||
|                 .all() | ||||
|                 .order_by("pk") | ||||
|             ) | ||||
|             return Group.objects.all().order_by("pk") | ||||
|         raise ValueError(f"Invalid type {type}") | ||||
|  | ||||
|     def google_credentials(self): | ||||
|  | ||||
| @ -29,7 +29,7 @@ class MicrosoftEntraGroupClient( | ||||
|     """Microsoft client for groups""" | ||||
|  | ||||
|     connection_type = MicrosoftEntraProviderGroup | ||||
|     connection_attr = "microsoftentraprovidergroup_set" | ||||
|     connection_type_query = "group" | ||||
|     can_discover = True | ||||
|  | ||||
|     def __init__(self, provider: MicrosoftEntraProvider) -> None: | ||||
|  | ||||
| @ -24,7 +24,7 @@ class MicrosoftEntraUserClient(MicrosoftEntraSyncClient[User, MicrosoftEntraProv | ||||
|     """Sync authentik users into microsoft entra""" | ||||
|  | ||||
|     connection_type = MicrosoftEntraProviderUser | ||||
|     connection_attr = "microsoftentraprovideruser_set" | ||||
|     connection_type_query = "user" | ||||
|     can_discover = True | ||||
|  | ||||
|     def __init__(self, provider: MicrosoftEntraProvider) -> None: | ||||
|  | ||||
| @ -121,11 +121,7 @@ class MicrosoftEntraProvider(OutgoingSyncProvider, BackchannelProvider): | ||||
|         if type == User: | ||||
|             # Get queryset of all users with consistent ordering | ||||
|             # according to the provider's settings | ||||
|             base = ( | ||||
|                 User.objects.prefetch_related("microsoftentraprovideruser_set") | ||||
|                 .all() | ||||
|                 .exclude_anonymous() | ||||
|             ) | ||||
|             base = User.objects.all().exclude_anonymous() | ||||
|             if self.exclude_users_service_account: | ||||
|                 base = base.exclude(type=UserTypes.SERVICE_ACCOUNT).exclude( | ||||
|                     type=UserTypes.INTERNAL_SERVICE_ACCOUNT | ||||
| @ -135,11 +131,7 @@ class MicrosoftEntraProvider(OutgoingSyncProvider, BackchannelProvider): | ||||
|             return base.order_by("pk") | ||||
|         if type == Group: | ||||
|             # Get queryset of all groups with consistent ordering | ||||
|             return ( | ||||
|                 Group.objects.prefetch_related("microsoftentraprovidergroup_set") | ||||
|                 .all() | ||||
|                 .order_by("pk") | ||||
|             ) | ||||
|             return Group.objects.all().order_by("pk") | ||||
|         raise ValueError(f"Invalid type {type}") | ||||
|  | ||||
|     def microsoft_credentials(self): | ||||
|  | ||||
| @ -19,7 +19,6 @@ TENANT_APPS = [ | ||||
|     "authentik.enterprise.providers.microsoft_entra", | ||||
|     "authentik.enterprise.providers.ssf", | ||||
|     "authentik.enterprise.stages.authenticator_endpoint_gdtc", | ||||
|     "authentik.enterprise.stages.mtls", | ||||
|     "authentik.enterprise.stages.source", | ||||
| ] | ||||
|  | ||||
|  | ||||
| @ -1,31 +0,0 @@ | ||||
| """Mutual TLS Stage API Views""" | ||||
|  | ||||
| from rest_framework.viewsets import ModelViewSet | ||||
|  | ||||
| from authentik.core.api.used_by import UsedByMixin | ||||
| from authentik.enterprise.api import EnterpriseRequiredMixin | ||||
| from authentik.enterprise.stages.mtls.models import MutualTLSStage | ||||
| from authentik.flows.api.stages import StageSerializer | ||||
|  | ||||
|  | ||||
| class MutualTLSStageSerializer(EnterpriseRequiredMixin, StageSerializer): | ||||
|     """MutualTLSStage Serializer""" | ||||
|  | ||||
|     class Meta: | ||||
|         model = MutualTLSStage | ||||
|         fields = StageSerializer.Meta.fields + [ | ||||
|             "mode", | ||||
|             "certificate_authorities", | ||||
|             "cert_attribute", | ||||
|             "user_attribute", | ||||
|         ] | ||||
|  | ||||
|  | ||||
| class MutualTLSStageViewSet(UsedByMixin, ModelViewSet): | ||||
|     """MutualTLSStage Viewset""" | ||||
|  | ||||
|     queryset = MutualTLSStage.objects.all() | ||||
|     serializer_class = MutualTLSStageSerializer | ||||
|     filterset_fields = "__all__" | ||||
|     ordering = ["name"] | ||||
|     search_fields = ["name"] | ||||
| @ -1,12 +0,0 @@ | ||||
| """authentik stage app config""" | ||||
|  | ||||
| from authentik.enterprise.apps import EnterpriseConfig | ||||
|  | ||||
|  | ||||
| class AuthentikEnterpriseStageMTLSConfig(EnterpriseConfig): | ||||
|     """authentik MTLS stage config""" | ||||
|  | ||||
|     name = "authentik.enterprise.stages.mtls" | ||||
|     label = "authentik_stages_mtls" | ||||
|     verbose_name = "authentik Enterprise.Stages.MTLS" | ||||
|     default = True | ||||
| @ -1,68 +0,0 @@ | ||||
| # Generated by Django 5.1.9 on 2025-05-19 18:29 | ||||
|  | ||||
| import django.db.models.deletion | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     initial = True | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("authentik_crypto", "0004_alter_certificatekeypair_name"), | ||||
|         ("authentik_flows", "0027_auto_20231028_1424"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.CreateModel( | ||||
|             name="MutualTLSStage", | ||||
|             fields=[ | ||||
|                 ( | ||||
|                     "stage_ptr", | ||||
|                     models.OneToOneField( | ||||
|                         auto_created=True, | ||||
|                         on_delete=django.db.models.deletion.CASCADE, | ||||
|                         parent_link=True, | ||||
|                         primary_key=True, | ||||
|                         serialize=False, | ||||
|                         to="authentik_flows.stage", | ||||
|                     ), | ||||
|                 ), | ||||
|                 ( | ||||
|                     "mode", | ||||
|                     models.TextField(choices=[("optional", "Optional"), ("required", "Required")]), | ||||
|                 ), | ||||
|                 ( | ||||
|                     "cert_attribute", | ||||
|                     models.TextField( | ||||
|                         choices=[ | ||||
|                             ("subject", "Subject"), | ||||
|                             ("common_name", "Common Name"), | ||||
|                             ("email", "Email"), | ||||
|                         ] | ||||
|                     ), | ||||
|                 ), | ||||
|                 ( | ||||
|                     "user_attribute", | ||||
|                     models.TextField(choices=[("username", "Username"), ("email", "Email")]), | ||||
|                 ), | ||||
|                 ( | ||||
|                     "certificate_authorities", | ||||
|                     models.ManyToManyField( | ||||
|                         blank=True, | ||||
|                         default=None, | ||||
|                         help_text="Configure certificate authorities to validate the certificate against. This option has a higher priority than the `client_certificate` option on `Brand`.", | ||||
|                         to="authentik_crypto.certificatekeypair", | ||||
|                     ), | ||||
|                 ), | ||||
|             ], | ||||
|             options={ | ||||
|                 "verbose_name": "Mutual TLS Stage", | ||||
|                 "verbose_name_plural": "Mutual TLS Stages", | ||||
|                 "permissions": [ | ||||
|                     ("pass_outpost_certificate", "Permissions to pass Certificates for outposts.") | ||||
|                 ], | ||||
|             }, | ||||
|             bases=("authentik_flows.stage",), | ||||
|         ), | ||||
|     ] | ||||
| @ -1,71 +0,0 @@ | ||||
| from django.db import models | ||||
| from django.utils.translation import gettext_lazy as _ | ||||
| from rest_framework.serializers import Serializer | ||||
|  | ||||
| from authentik.crypto.models import CertificateKeyPair | ||||
| from authentik.flows.models import Stage | ||||
| from authentik.flows.stage import StageView | ||||
|  | ||||
|  | ||||
| class TLSMode(models.TextChoices): | ||||
|     """Modes the TLS Stage can operate in""" | ||||
|  | ||||
|     OPTIONAL = "optional" | ||||
|     REQUIRED = "required" | ||||
|  | ||||
|  | ||||
| class CertAttributes(models.TextChoices): | ||||
|     """Certificate attribute used for user matching""" | ||||
|  | ||||
|     SUBJECT = "subject" | ||||
|     COMMON_NAME = "common_name" | ||||
|     EMAIL = "email" | ||||
|  | ||||
|  | ||||
| class UserAttributes(models.TextChoices): | ||||
|     """User attribute for user matching""" | ||||
|  | ||||
|     USERNAME = "username" | ||||
|     EMAIL = "email" | ||||
|  | ||||
|  | ||||
| class MutualTLSStage(Stage): | ||||
|     """Authenticate/enroll users using a client-certificate.""" | ||||
|  | ||||
|     mode = models.TextField(choices=TLSMode.choices) | ||||
|  | ||||
|     certificate_authorities = models.ManyToManyField( | ||||
|         CertificateKeyPair, | ||||
|         default=None, | ||||
|         blank=True, | ||||
|         help_text=_( | ||||
|             "Configure certificate authorities to validate the certificate against. " | ||||
|             "This option has a higher priority than the `client_certificate` option on `Brand`." | ||||
|         ), | ||||
|     ) | ||||
|  | ||||
|     cert_attribute = models.TextField(choices=CertAttributes.choices) | ||||
|     user_attribute = models.TextField(choices=UserAttributes.choices) | ||||
|  | ||||
|     @property | ||||
|     def view(self) -> type[StageView]: | ||||
|         from authentik.enterprise.stages.mtls.stage import MTLSStageView | ||||
|  | ||||
|         return MTLSStageView | ||||
|  | ||||
|     @property | ||||
|     def serializer(self) -> type[Serializer]: | ||||
|         from authentik.enterprise.stages.mtls.api import MutualTLSStageSerializer | ||||
|  | ||||
|         return MutualTLSStageSerializer | ||||
|  | ||||
|     @property | ||||
|     def component(self) -> str: | ||||
|         return "ak-stage-mtls-form" | ||||
|  | ||||
|     class Meta: | ||||
|         verbose_name = _("Mutual TLS Stage") | ||||
|         verbose_name_plural = _("Mutual TLS Stages") | ||||
|         permissions = [ | ||||
|             ("pass_outpost_certificate", _("Permissions to pass Certificates for outposts.")), | ||||
|         ] | ||||
| @ -1,230 +0,0 @@ | ||||
| from binascii import hexlify | ||||
| from urllib.parse import unquote_plus | ||||
|  | ||||
| from cryptography.exceptions import InvalidSignature | ||||
| from cryptography.hazmat.primitives import hashes | ||||
| from cryptography.x509 import ( | ||||
|     Certificate, | ||||
|     NameOID, | ||||
|     ObjectIdentifier, | ||||
|     UnsupportedGeneralNameType, | ||||
|     load_pem_x509_certificate, | ||||
| ) | ||||
| from cryptography.x509.verification import PolicyBuilder, Store, VerificationError | ||||
| from django.utils.translation import gettext_lazy as _ | ||||
|  | ||||
| from authentik.brands.models import Brand | ||||
| from authentik.core.models import User | ||||
| from authentik.crypto.models import CertificateKeyPair | ||||
| from authentik.enterprise.stages.mtls.models import ( | ||||
|     CertAttributes, | ||||
|     MutualTLSStage, | ||||
|     TLSMode, | ||||
|     UserAttributes, | ||||
| ) | ||||
| from authentik.flows.challenge import AccessDeniedChallenge | ||||
| from authentik.flows.models import FlowDesignation | ||||
| from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER | ||||
| from authentik.flows.stage import ChallengeStageView | ||||
| from authentik.root.middleware import ClientIPMiddleware | ||||
| from authentik.stages.password.stage import PLAN_CONTEXT_METHOD, PLAN_CONTEXT_METHOD_ARGS | ||||
| from authentik.stages.prompt.stage import PLAN_CONTEXT_PROMPT | ||||
|  | ||||
| # All of these headers must only be accepted from "trusted" reverse proxies | ||||
| # See internal/web/proxy.go:39 | ||||
| HEADER_PROXY_FORWARDED = "X-Forwarded-Client-Cert" | ||||
| HEADER_NGINX_FORWARDED = "SSL-Client-Cert" | ||||
| HEADER_TRAEFIK_FORWARDED = "X-Forwarded-TLS-Client-Cert" | ||||
| HEADER_OUTPOST_FORWARDED = "X-Authentik-Outpost-Certificate" | ||||
|  | ||||
|  | ||||
| PLAN_CONTEXT_CERTIFICATE = "certificate" | ||||
|  | ||||
|  | ||||
| class MTLSStageView(ChallengeStageView): | ||||
|  | ||||
|     def __parse_single_cert(self, raw: str | None) -> list[Certificate]: | ||||
|         """Helper to parse a single certificate""" | ||||
|         if not raw: | ||||
|             return [] | ||||
|         try: | ||||
|             cert = load_pem_x509_certificate(unquote_plus(raw).encode()) | ||||
|             return [cert] | ||||
|         except ValueError as exc: | ||||
|             self.logger.info("Failed to parse certificate", exc=exc) | ||||
|             return [] | ||||
|  | ||||
|     def _parse_cert_xfcc(self) -> list[Certificate]: | ||||
|         """Parse certificates in the format given to us in | ||||
|         the format of the authentik router/envoy""" | ||||
|         xfcc_raw = self.request.headers.get(HEADER_PROXY_FORWARDED) | ||||
|         if not xfcc_raw: | ||||
|             return [] | ||||
|         certs = [] | ||||
|         for r_cert in xfcc_raw.split(","): | ||||
|             el = r_cert.split(";") | ||||
|             raw_cert = {k.split("=")[0]: k.split("=")[1] for k in el} | ||||
|             if "Cert" not in raw_cert: | ||||
|                 continue | ||||
|             certs.extend(self.__parse_single_cert(raw_cert["Cert"])) | ||||
|         return certs | ||||
|  | ||||
|     def _parse_cert_nginx(self) -> list[Certificate]: | ||||
|         """Parse certificates in the format nginx-ingress gives to us""" | ||||
|         sslcc_raw = self.request.headers.get(HEADER_NGINX_FORWARDED) | ||||
|         return self.__parse_single_cert(sslcc_raw) | ||||
|  | ||||
|     def _parse_cert_traefik(self) -> list[Certificate]: | ||||
|         """Parse certificates in the format traefik gives to us""" | ||||
|         ftcc_raw = self.request.headers.get(HEADER_TRAEFIK_FORWARDED) | ||||
|         return self.__parse_single_cert(ftcc_raw) | ||||
|  | ||||
|     def _parse_cert_outpost(self) -> list[Certificate]: | ||||
|         """Parse certificates in the format outposts give to us. Also authenticates | ||||
|         the outpost to ensure it has the permission to do so""" | ||||
|         user = ClientIPMiddleware.get_outpost_user(self.request) | ||||
|         if not user: | ||||
|             return [] | ||||
|         if not user.has_perm( | ||||
|             "pass_outpost_certificate", self.executor.current_stage | ||||
|         ) and not user.has_perm("authentik_stages_mtls.pass_outpost_certificate"): | ||||
|             return [] | ||||
|         outpost_raw = self.request.headers.get(HEADER_OUTPOST_FORWARDED) | ||||
|         return self.__parse_single_cert(outpost_raw) | ||||
|  | ||||
|     def get_authorities(self) -> list[CertificateKeyPair] | None: | ||||
|         # We can't access `certificate_authorities` on `self.executor.current_stage`, as that would | ||||
|         # load the certificate into the directly referenced foreign key, which we have to pickle | ||||
|         # as part of the flow plan, and cryptography certs can't be pickled | ||||
|         stage: MutualTLSStage = ( | ||||
|             MutualTLSStage.objects.filter(pk=self.executor.current_stage.pk) | ||||
|             .prefetch_related("certificate_authorities") | ||||
|             .first() | ||||
|         ) | ||||
|         if stage.certificate_authorities.exists(): | ||||
|             return stage.certificate_authorities.order_by("name") | ||||
|         brand: Brand = self.request.brand | ||||
|         if brand.client_certificates.exists(): | ||||
|             return brand.client_certificates.order_by("name") | ||||
|         return None | ||||
|  | ||||
|     def validate_cert(self, authorities: list[CertificateKeyPair], certs: list[Certificate]): | ||||
|         authorities_cert = [x.certificate for x in authorities] | ||||
|         for _cert in certs: | ||||
|             try: | ||||
|                 PolicyBuilder().store(Store(authorities_cert)).build_client_verifier().verify( | ||||
|                     _cert, [] | ||||
|                 ) | ||||
|                 return _cert | ||||
|             except ( | ||||
|                 InvalidSignature, | ||||
|                 TypeError, | ||||
|                 ValueError, | ||||
|                 VerificationError, | ||||
|                 UnsupportedGeneralNameType, | ||||
|             ) as exc: | ||||
|                 self.logger.warning("Discarding invalid certificate", cert=_cert, exc=exc) | ||||
|                 continue | ||||
|         return None | ||||
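|  | ||||
|     # Standalone sketch of the verification call above, assuming cryptography's | ||||
|     # x509.verification API (local variable names here are illustrative): | ||||
|     # | ||||
|     #   from cryptography.x509.verification import PolicyBuilder, Store | ||||
|     #   store = Store([kp.certificate for kp in authorities])  # trusted CA certificates | ||||
|     #   verifier = PolicyBuilder().store(store).build_client_verifier() | ||||
|     #   verified = verifier.verify(client_cert, [])  # raises VerificationError when untrusted | ||||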
|  | ||||
|     def check_if_user(self, cert: Certificate): | ||||
|         stage: MutualTLSStage = self.executor.current_stage | ||||
|         cert_attr = None | ||||
|         user_attr = None | ||||
|         match stage.cert_attribute: | ||||
|             case CertAttributes.SUBJECT: | ||||
|                 cert_attr = cert.subject.rfc4514_string() | ||||
|             case CertAttributes.COMMON_NAME: | ||||
|                 cert_attr = self.get_cert_attribute(cert, NameOID.COMMON_NAME) | ||||
|             case CertAttributes.EMAIL: | ||||
|                 cert_attr = self.get_cert_attribute(cert, NameOID.EMAIL_ADDRESS) | ||||
|         match stage.user_attribute: | ||||
|             case UserAttributes.USERNAME: | ||||
|                 user_attr = "username" | ||||
|             case UserAttributes.EMAIL: | ||||
|                 user_attr = "email" | ||||
|         if not user_attr or not cert_attr: | ||||
|             return None | ||||
|         return User.objects.filter(**{user_attr: cert_attr}).first() | ||||
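|  | ||||
|     # Example of the matching above: with cert_attribute=COMMON_NAME and | ||||
|     # user_attribute=USERNAME (the combination configured in the tests below), a | ||||
|     # certificate whose subject is "CN=client" resolves to the user named "client". | ||||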
|  | ||||
|     def _cert_to_dict(self, cert: Certificate) -> dict: | ||||
|         """Represent a certificate in a dictionary, as certificate objects cannot be pickled""" | ||||
|         return { | ||||
|             "serial_number": str(cert.serial_number), | ||||
|             "subject": cert.subject.rfc4514_string(), | ||||
|             "issuer": cert.issuer.rfc4514_string(), | ||||
|             "fingerprint_sha256": hexlify(cert.fingerprint(hashes.SHA256()), ":").decode("utf-8"), | ||||
|             "fingerprint_sha1": hexlify(cert.fingerprint(hashes.SHA1()), ":").decode(  # nosec | ||||
|                 "utf-8" | ||||
|             ), | ||||
|         } | ||||
|  | ||||
|     def auth_user(self, user: User, cert: Certificate): | ||||
|         self.executor.plan.context[PLAN_CONTEXT_PENDING_USER] = user | ||||
|         self.executor.plan.context.setdefault(PLAN_CONTEXT_METHOD, "mtls") | ||||
|         self.executor.plan.context.setdefault(PLAN_CONTEXT_METHOD_ARGS, {}) | ||||
|         self.executor.plan.context[PLAN_CONTEXT_METHOD_ARGS].update( | ||||
|             {"certificate": self._cert_to_dict(cert)} | ||||
|         ) | ||||
|  | ||||
|     def enroll_prepare_user(self, cert: Certificate): | ||||
|         self.executor.plan.context.setdefault(PLAN_CONTEXT_PROMPT, {}) | ||||
|         self.executor.plan.context[PLAN_CONTEXT_PROMPT].update( | ||||
|             { | ||||
|                 "email": self.get_cert_attribute(cert, NameOID.EMAIL_ADDRESS), | ||||
|                 "name": self.get_cert_attribute(cert, NameOID.COMMON_NAME), | ||||
|             } | ||||
|         ) | ||||
|         self.executor.plan.context[PLAN_CONTEXT_CERTIFICATE] = self._cert_to_dict(cert) | ||||
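|  | ||||
|     # With the client certificate fixture used in this change (subject "CN=client", no | ||||
|     # email address in the subject), this seeds the prompt context with | ||||
|     # {"email": None, "name": "client"} and stores the _cert_to_dict() summary under | ||||
|     # PLAN_CONTEXT_CERTIFICATE. | ||||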
|  | ||||
|     def get_cert_attribute(self, cert: Certificate, oid: ObjectIdentifier) -> str | None: | ||||
|         attr = cert.subject.get_attributes_for_oid(oid) | ||||
|         if len(attr) < 1: | ||||
|             return None | ||||
|         return str(attr[0].value) | ||||
|  | ||||
|     def dispatch(self, request, *args, **kwargs): | ||||
|         stage: MutualTLSStage = self.executor.current_stage | ||||
|         certs = [ | ||||
|             *self._parse_cert_xfcc(), | ||||
|             *self._parse_cert_nginx(), | ||||
|             *self._parse_cert_traefik(), | ||||
|             *self._parse_cert_outpost(), | ||||
|         ] | ||||
|         authorities = self.get_authorities() | ||||
|         if not authorities: | ||||
|             self.logger.warning("No Certificate authority found") | ||||
|             if stage.mode == TLSMode.OPTIONAL: | ||||
|                 return self.executor.stage_ok() | ||||
|             if stage.mode == TLSMode.REQUIRED: | ||||
|                 return super().dispatch(request, *args, **kwargs) | ||||
|         cert = self.validate_cert(authorities, certs) | ||||
|         if not cert and stage.mode == TLSMode.REQUIRED: | ||||
|             self.logger.warning("Client certificate required but no certificates given") | ||||
|             return super().dispatch( | ||||
|                 request, | ||||
|                 *args, | ||||
|                 error_message=_("Certificate required but no certificate was given."), | ||||
|                 **kwargs, | ||||
|             ) | ||||
|         if not cert and stage.mode == TLSMode.OPTIONAL: | ||||
|             self.logger.info("No certificate given, continuing") | ||||
|             return self.executor.stage_ok() | ||||
|         existing_user = self.check_if_user(cert) | ||||
|         if self.executor.flow.designation == FlowDesignation.ENROLLMENT: | ||||
|             self.enroll_prepare_user(cert) | ||||
|         elif existing_user: | ||||
|             self.auth_user(existing_user, cert) | ||||
|         else: | ||||
|             return super().dispatch( | ||||
|                 request, *args, error_message=_("No user found for certificate."), **kwargs | ||||
|             ) | ||||
|         return self.executor.stage_ok() | ||||
|  | ||||
|     def get_challenge(self, *args, error_message: str | None = None, **kwargs): | ||||
|         return AccessDeniedChallenge( | ||||
|             data={ | ||||
|                 "component": "ak-stage-access-denied", | ||||
|                 "error_message": str(error_message or "Unknown error"), | ||||
|             } | ||||
|         ) | ||||
| @ -1,31 +0,0 @@ | ||||
| -----BEGIN CERTIFICATE----- | ||||
| MIIFXDCCA0SgAwIBAgIUBmV7zREyC1SPr72/75/L9zpwV18wDQYJKoZIhvcNAQEL | ||||
| BQAwRjEaMBgGA1UEAwwRYXV0aGVudGlrIFRlc3QgQ0ExEjAQBgNVBAoMCWF1dGhl | ||||
| bnRpazEUMBIGA1UECwwLU2VsZi1zaWduZWQwHhcNMjUwNDI3MTgzMDUwWhcNMzUw | ||||
| MzA3MTgzMDUwWjBGMRowGAYDVQQDDBFhdXRoZW50aWsgVGVzdCBDQTESMBAGA1UE | ||||
| CgwJYXV0aGVudGlrMRQwEgYDVQQLDAtTZWxmLXNpZ25lZDCCAiIwDQYJKoZIhvcN | ||||
| AQEBBQADggIPADCCAgoCggIBAMc0NxZj7j1mPu0aRToo8oMPdC3T99xgxnqdr18x | ||||
| LV4pWyi/YLghgZHqNQY2xNP6JIlSeUZD6KFUYT2sPL4Av/zSg5zO8bl+/lf7ckje | ||||
| O1/Bt5A8xtL0CpmpMDGiI6ibdDElaywM6AohisbxrV29pygSKGq2wugF/urqGtE+ | ||||
| 5z4y5Kt6qMdKkd0iXT+WagbQTIUlykFKgB0+qqTLzDl01lVDa/DoLl8Hqp45mVx2 | ||||
| pqrGsSa3TCErLIv9hUlZklF7A8UV4ZB4JL20UKcP8dKzQClviNie17tpsUpOuy3A | ||||
| SQ6+guWTHTLJNCSdLn1xIqc5q+f5wd2dIDf8zXCTHj+Xp0bJE3Vgaq5R31K9+b+1 | ||||
| 2dDWz1KcNJaLEnw2+b0O8M64wTMLxhqOv7QfLUr6Pmg1ZymghjLcZ6bnU9e31Vza | ||||
| hlPKhxjqYQUC4Kq+oaYF6qdUeJy+dsYf0iDv5tTC+eReZDWIjxTPrNpwA773ZwT7 | ||||
| WVmL7ULGpuP2g9rNvFBcZiN+i6d7CUoN+jd/iRdo79lrI0dfXiyy4bYgW/2HeZfF | ||||
| HaOsc1xsoqnJdWbWkX/ooyaCjAfm07kS3HiOzz4q3QW4wgGrwV8lEraLPxYYeOQu | ||||
| YcGMOM8NfnVkjc8gmyXUxedCje5Vz/Tu5fKrQEInnCmXxVsWbwr/LzEjMKAM/ivY | ||||
| 0TXxAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0G | ||||
| A1UdDgQWBBTa+Ns6QzqlNvnTGszkouQQtZnVJDANBgkqhkiG9w0BAQsFAAOCAgEA | ||||
| NpJEDMXjuEIzSzafkxSshvjnt5sMYmzmvjNoRlkxgN2YcWvPoxbalGAYzcpyggT2 | ||||
| 6xZY8R4tvB1oNTCArqwf860kkofUoJCr88D/pU3Cv4JhjCWs4pmXTsvSqlBSlJbo | ||||
| +jPBZwbn6it/6jcit6Be3rW2PtHe8tASd9Lf8/2r1ZvupXwPzcR84R4Z10ve2lqV | ||||
| xxcWlMmBh51CaYI0b1/WTe9Ua+wgkCVkxbf9zNcDQXjxw2ICWK+nR/4ld4nmqVm2 | ||||
| C7nhvXwU8FAHl7ZgR2Z3PLrwPuhd+kd6NXQqNkS9A+n+1vSRLbRjmV8pwIPpdPEq | ||||
| nslUAGJJBHDUBArxC3gOJSB+WtmaCfzDu2gepMf9Ng1H2ZhwSF/FH3v3fsJqZkzz | ||||
| NBstT9KuNGQRYiCmAPJaoVAc9BoLa+BFML1govtWtpdmbFk8PZEcuUsP7iAZqFF1 | ||||
| uuldPyZ8huGpQSR6Oq2bILRHowfGY0npTZAyxg0Vs8UMy1HTwNOp9OuRtArMZmsJ | ||||
| jFIx1QzRf9S1i6bYpOzOudoXj4ARkS1KmVExGjJFcIT0xlFSSERie2fEKSeEYOyG | ||||
| G+PA2qRt/F51FGOMm1ZscjPXqk2kt3C4BFbz6Vvxsq7D3lmhvFLn4jVA8+OidsM0 | ||||
| YUrVMtWET/RkjEIbADbgRXxNUNo+jtQZDU9C1IiAdfk= | ||||
| -----END CERTIFICATE----- | ||||
| @ -1,31 +0,0 @@ | ||||
| -----BEGIN CERTIFICATE----- | ||||
| MIIFWTCCA0GgAwIBAgIUDEnKCSmIXG/akySGes7bhOGrN/8wDQYJKoZIhvcNAQEL | ||||
| BQAwRjEaMBgGA1UEAwwRYXV0aGVudGlrIFRlc3QgQ0ExEjAQBgNVBAoMCWF1dGhl | ||||
| bnRpazEUMBIGA1UECwwLU2VsZi1zaWduZWQwHhcNMjUwNTE5MTIzODQ2WhcNMjYw | ||||
| NTE1MTIzODQ2WjARMQ8wDQYDVQQDDAZjbGllbnQwggIiMA0GCSqGSIb3DQEBAQUA | ||||
| A4ICDwAwggIKAoICAQCkPkS1V6l0gj0ulxMznkxkgrw4p9Tjd8teSsGZt02A2Eo6 | ||||
| 7D8FbJ7pp3d5fYW/TWuEKVBLWTID6rijW5EGcdgTM5Jxf/QR+aZTEK6umQxUd4yO | ||||
| mOtp+xVS3KlcsSej2dFpeE5h5VkZizHpvh5xkoAP8W5VtQLOVF0hIeumHnJmaeLj | ||||
| +mhK9PBFpO7k9SFrYYhd/uLrYbIdANihbIO2Q74rNEJHewhFNM7oNSjjEWzRd/7S | ||||
| qNdQij9JGrVG7u8YJJscEQHqyHMYFVCEMjxmsge5BO6Vx5OWmUE3wXPzb5TbyTS4 | ||||
| +yg88g9rYTUXrzz+poCyKpaur45qBsdw35lJ8nq69VJj2xJLGQDwoTgGSXRuPciC | ||||
| 3OilQI+Ma+j8qQGJxJ8WJxISlf1cuhp+V4ZUd1lawlM5hAXyXmHRlH4pun4y+g7O | ||||
| O34+fE3pK25JjVCicMT/rC2A/sb95j/fHTzzJpbB70U0I50maTcIsOkyw6aiF//E | ||||
| 0ShTDz14x22SCMolUc6hxTDZvBB6yrcJHd7d9CCnFH2Sgo13QrtNJ/atXgm13HGh | ||||
| wBzRwK38XUGl/J4pJaxAupTVCPriStUM3m0EYHNelRRUE91pbyeGT0rvOuv00uLw | ||||
| Rj7K7hJZR8avTKWmKrVBVpq+gSojGW1DwBS0NiDNkZs0d/IjB1wkzczEgdZjXwID | ||||
| AQABo3QwcjAfBgNVHSMEGDAWgBTa+Ns6QzqlNvnTGszkouQQtZnVJDAdBgNVHSUE | ||||
| FjAUBggrBgEFBQcDAgYIKwYBBQUHAwEwEQYDVR0RBAowCIIGY2xpZW50MB0GA1Ud | ||||
| DgQWBBT1xg5sXkypRBwvCxBuyfoanaiZ5jANBgkqhkiG9w0BAQsFAAOCAgEAvUAz | ||||
| YwIjxY/0KHZDU8owdILVqKChzfLcy9OHNPyEI3TSOI8X6gNtBO+HE6r8aWGcC9vw | ||||
| zzeIsNQ3UEjvRWi2r+vUVbiPTbFdZboNDSZv6ZmGHxwd85VsjXRGoXV6koCT/9zi | ||||
| 9/lCM1DwqwYSwBphMJdRVFRUMluSYk1oHflGeA18xgGuts4eFivJwhabGm1AdVVQ | ||||
| /CYvqCuTxd/DCzWZBdyxYpDru64i/kyeJCt1pThKEFDWmpumFdBI4CxJ0OhxVSGp | ||||
| dOXzK+Y6ULepxCvi6/OpSog52jQ6PnNd1ghiYtq7yO1T4GQz65M1vtHHVvQ3gfBE | ||||
| AuKYQp6io7ypitRx+LpjsBQenyP4FFGfrq7pm90nLluOBOArfSdF0N+CP2wo/YFV | ||||
| 9BGf89OtvRi3BXCm2NXkE/Sc4We26tY8x7xNLOmNs8YOT0O3r/EQ690W9GIwRMx0 | ||||
| m0r/RXWn5V3o4Jib9r8eH9NzaDstD8g9dECcGfM4fHoM/DAGFaRrNcjMsS1APP3L | ||||
| jp7+BfBSXtrz9V6rVJ3CBLXlLK0AuSm7bqd1MJsGA9uMLpsVZIUA+KawcmPGdPU+ | ||||
| NxdpBCtzyurQSUyaTLtVqSeP35gMAwaNzUDph8Uh+vHz+kRwgXS19OQvTaud5LJu | ||||
| nQe4JNS+u5e2VDEBWUxt8NTpu6eShDN0iIEHtxA= | ||||
| -----END CERTIFICATE----- | ||||
| @ -1,228 +0,0 @@ | ||||
| from unittest.mock import MagicMock, patch | ||||
| from urllib.parse import quote_plus | ||||
|  | ||||
| from django.urls import reverse | ||||
| from guardian.shortcuts import assign_perm | ||||
|  | ||||
| from authentik.core.models import User | ||||
| from authentik.core.tests.utils import ( | ||||
|     create_test_brand, | ||||
|     create_test_cert, | ||||
|     create_test_flow, | ||||
|     create_test_user, | ||||
| ) | ||||
| from authentik.crypto.models import CertificateKeyPair | ||||
| from authentik.enterprise.stages.mtls.models import ( | ||||
|     CertAttributes, | ||||
|     MutualTLSStage, | ||||
|     TLSMode, | ||||
|     UserAttributes, | ||||
| ) | ||||
| from authentik.enterprise.stages.mtls.stage import PLAN_CONTEXT_CERTIFICATE | ||||
| from authentik.flows.models import FlowDesignation, FlowStageBinding | ||||
| from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER | ||||
| from authentik.flows.tests import FlowTestCase | ||||
| from authentik.lib.generators import generate_id | ||||
| from authentik.lib.tests.utils import load_fixture | ||||
| from authentik.outposts.models import Outpost, OutpostType | ||||
| from authentik.stages.prompt.stage import PLAN_CONTEXT_PROMPT | ||||
|  | ||||
|  | ||||
| class MTLSStageTests(FlowTestCase): | ||||
|  | ||||
|     def setUp(self): | ||||
|         super().setUp() | ||||
|         self.flow = create_test_flow(FlowDesignation.AUTHENTICATION) | ||||
|         self.ca = CertificateKeyPair.objects.create( | ||||
|             name=generate_id(), | ||||
|             certificate_data=load_fixture("fixtures/ca.pem"), | ||||
|         ) | ||||
|         self.stage = MutualTLSStage.objects.create( | ||||
|             name=generate_id(), | ||||
|             mode=TLSMode.REQUIRED, | ||||
|             cert_attribute=CertAttributes.COMMON_NAME, | ||||
|             user_attribute=UserAttributes.USERNAME, | ||||
|         ) | ||||
|  | ||||
|         self.stage.certificate_authorities.add(self.ca) | ||||
|         self.binding = FlowStageBinding.objects.create(target=self.flow, stage=self.stage, order=0) | ||||
|         self.client_cert = load_fixture("fixtures/cert_client.pem") | ||||
|         # User matching the certificate | ||||
|         User.objects.filter(username="client").delete() | ||||
|         self.cert_user = create_test_user(username="client") | ||||
|  | ||||
|     def test_parse_xfcc(self): | ||||
|         """Test authentik Proxy/Envoy's XFCC format""" | ||||
|         with self.assertFlowFinishes() as plan: | ||||
|             res = self.client.get( | ||||
|                 reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}), | ||||
|                 headers={"X-Forwarded-Client-Cert": f"Cert={quote_plus(self.client_cert)}"}, | ||||
|             ) | ||||
|             self.assertEqual(res.status_code, 200) | ||||
|             self.assertStageRedirects(res, reverse("authentik_core:root-redirect")) | ||||
|         self.assertEqual(plan().context[PLAN_CONTEXT_PENDING_USER], self.cert_user) | ||||
|  | ||||
|     def test_parse_nginx(self): | ||||
|         """Test nginx's format""" | ||||
|         with self.assertFlowFinishes() as plan: | ||||
|             res = self.client.get( | ||||
|                 reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}), | ||||
|                 headers={"SSL-Client-Cert": quote_plus(self.client_cert)}, | ||||
|             ) | ||||
|             self.assertEqual(res.status_code, 200) | ||||
|             self.assertStageRedirects(res, reverse("authentik_core:root-redirect")) | ||||
|         self.assertEqual(plan().context[PLAN_CONTEXT_PENDING_USER], self.cert_user) | ||||
|  | ||||
|     def test_parse_traefik(self): | ||||
|         """Test traefik's format""" | ||||
|         with self.assertFlowFinishes() as plan: | ||||
|             res = self.client.get( | ||||
|                 reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}), | ||||
|                 headers={"X-Forwarded-TLS-Client-Cert": quote_plus(self.client_cert)}, | ||||
|             ) | ||||
|             self.assertEqual(res.status_code, 200) | ||||
|             self.assertStageRedirects(res, reverse("authentik_core:root-redirect")) | ||||
|         self.assertEqual(plan().context[PLAN_CONTEXT_PENDING_USER], self.cert_user) | ||||
|  | ||||
|     def test_parse_outpost_object(self): | ||||
|         """Test outposts's format""" | ||||
|         outpost = Outpost.objects.create(name=generate_id(), type=OutpostType.PROXY) | ||||
|         assign_perm("pass_outpost_certificate", outpost.user, self.stage) | ||||
|         with patch( | ||||
|             "authentik.root.middleware.ClientIPMiddleware.get_outpost_user", | ||||
|             MagicMock(return_value=outpost.user), | ||||
|         ): | ||||
|             with self.assertFlowFinishes() as plan: | ||||
|                 res = self.client.get( | ||||
|                     reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}), | ||||
|                     headers={"X-Authentik-Outpost-Certificate": quote_plus(self.client_cert)}, | ||||
|                 ) | ||||
|                 self.assertEqual(res.status_code, 200) | ||||
|                 self.assertStageRedirects(res, reverse("authentik_core:root-redirect")) | ||||
|             self.assertEqual(plan().context[PLAN_CONTEXT_PENDING_USER], self.cert_user) | ||||
|  | ||||
|     def test_parse_outpost_global(self): | ||||
|         """Test outposts's format""" | ||||
|         outpost = Outpost.objects.create(name=generate_id(), type=OutpostType.PROXY) | ||||
|         assign_perm("authentik_stages_mtls.pass_outpost_certificate", outpost.user) | ||||
|         with patch( | ||||
|             "authentik.root.middleware.ClientIPMiddleware.get_outpost_user", | ||||
|             MagicMock(return_value=outpost.user), | ||||
|         ): | ||||
|             with self.assertFlowFinishes() as plan: | ||||
|                 res = self.client.get( | ||||
|                     reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}), | ||||
|                     headers={"X-Authentik-Outpost-Certificate": quote_plus(self.client_cert)}, | ||||
|                 ) | ||||
|                 self.assertEqual(res.status_code, 200) | ||||
|                 self.assertStageRedirects(res, reverse("authentik_core:root-redirect")) | ||||
|             self.assertEqual(plan().context[PLAN_CONTEXT_PENDING_USER], self.cert_user) | ||||
|  | ||||
|     def test_parse_outpost_no_perm(self): | ||||
|         """Test outposts's format""" | ||||
|         outpost = Outpost.objects.create(name=generate_id(), type=OutpostType.PROXY) | ||||
|         with patch( | ||||
|             "authentik.root.middleware.ClientIPMiddleware.get_outpost_user", | ||||
|             MagicMock(return_value=outpost.user), | ||||
|         ): | ||||
|             res = self.client.get( | ||||
|                 reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}), | ||||
|                 headers={"X-Authentik-Outpost-Certificate": quote_plus(self.client_cert)}, | ||||
|             ) | ||||
|             self.assertEqual(res.status_code, 200) | ||||
|             self.assertStageResponse(res, self.flow, component="ak-stage-access-denied") | ||||
|  | ||||
|     def test_invalid_cert(self): | ||||
|         """Test invalid certificate""" | ||||
|         cert = create_test_cert() | ||||
|         with self.assertFlowFinishes() as plan: | ||||
|             res = self.client.get( | ||||
|                 reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}), | ||||
|                 headers={"X-Forwarded-TLS-Client-Cert": quote_plus(cert.certificate_data)}, | ||||
|             ) | ||||
|             self.assertEqual(res.status_code, 200) | ||||
|             self.assertStageResponse(res, self.flow, component="ak-stage-access-denied") | ||||
|         self.assertNotIn(PLAN_CONTEXT_PENDING_USER, plan().context) | ||||
|  | ||||
|     def test_auth_no_user(self): | ||||
|         """Test auth with no user""" | ||||
|         User.objects.filter(username="client").delete() | ||||
|         res = self.client.get( | ||||
|             reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}), | ||||
|             headers={"X-Forwarded-TLS-Client-Cert": quote_plus(self.client_cert)}, | ||||
|         ) | ||||
|         self.assertEqual(res.status_code, 200) | ||||
|         self.assertStageResponse(res, self.flow, component="ak-stage-access-denied") | ||||
|  | ||||
|     def test_brand_ca(self): | ||||
|         """Test using a CA from the brand""" | ||||
|         self.stage.certificate_authorities.clear() | ||||
|  | ||||
|         brand = create_test_brand() | ||||
|         brand.client_certificates.add(self.ca) | ||||
|         with self.assertFlowFinishes() as plan: | ||||
|             res = self.client.get( | ||||
|                 reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}), | ||||
|                 headers={"X-Forwarded-TLS-Client-Cert": quote_plus(self.client_cert)}, | ||||
|             ) | ||||
|             self.assertEqual(res.status_code, 200) | ||||
|             self.assertStageRedirects(res, reverse("authentik_core:root-redirect")) | ||||
|         self.assertEqual(plan().context[PLAN_CONTEXT_PENDING_USER], self.cert_user) | ||||
|  | ||||
|     def test_no_ca_optional(self): | ||||
|         """Test using no CA Set""" | ||||
|         self.stage.mode = TLSMode.OPTIONAL | ||||
|         self.stage.certificate_authorities.clear() | ||||
|         self.stage.save() | ||||
|         res = self.client.get( | ||||
|             reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}), | ||||
|             headers={"X-Forwarded-TLS-Client-Cert": quote_plus(self.client_cert)}, | ||||
|         ) | ||||
|         self.assertEqual(res.status_code, 200) | ||||
|         self.assertStageRedirects(res, reverse("authentik_core:root-redirect")) | ||||
|  | ||||
|     def test_no_ca_required(self): | ||||
|         """Test using no CA Set""" | ||||
|         self.stage.certificate_authorities.clear() | ||||
|         self.stage.save() | ||||
|         res = self.client.get( | ||||
|             reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}), | ||||
|             headers={"X-Forwarded-TLS-Client-Cert": quote_plus(self.client_cert)}, | ||||
|         ) | ||||
|         self.assertEqual(res.status_code, 200) | ||||
|         self.assertStageResponse(res, self.flow, component="ak-stage-access-denied") | ||||
|  | ||||
|     def test_no_cert_optional(self): | ||||
|         """Test using no cert Set""" | ||||
|         self.stage.mode = TLSMode.OPTIONAL | ||||
|         self.stage.save() | ||||
|         res = self.client.get( | ||||
|             reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}), | ||||
|         ) | ||||
|         self.assertEqual(res.status_code, 200) | ||||
|         self.assertStageRedirects(res, reverse("authentik_core:root-redirect")) | ||||
|  | ||||
|     def test_enroll(self): | ||||
|         """Test Enrollment flow""" | ||||
|         self.flow.designation = FlowDesignation.ENROLLMENT | ||||
|         self.flow.save() | ||||
|         with self.assertFlowFinishes() as plan: | ||||
|             res = self.client.get( | ||||
|                 reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}), | ||||
|                 headers={"X-Forwarded-TLS-Client-Cert": quote_plus(self.client_cert)}, | ||||
|             ) | ||||
|             self.assertEqual(res.status_code, 200) | ||||
|             self.assertStageRedirects(res, reverse("authentik_core:root-redirect")) | ||||
|         self.assertEqual(plan().context[PLAN_CONTEXT_PROMPT], {"email": None, "name": "client"}) | ||||
|         self.assertEqual( | ||||
|             plan().context[PLAN_CONTEXT_CERTIFICATE], | ||||
|             { | ||||
|                 "fingerprint_sha1": "52:39:ca:1e:3a:1f:78:3a:9f:26:3b:c2:84:99:48:68:99:99:81:8a", | ||||
|                 "fingerprint_sha256": ( | ||||
|                     "c1:07:8b:7c:e9:02:57:87:1e:92:e5:81:83:21:bc:92:c7:47:65:e3:97:fb:05:97:6f:36:9e:b5:31:77:98:b7" | ||||
|                 ), | ||||
|                 "issuer": "OU=Self-signed,O=authentik,CN=authentik Test CA", | ||||
|                 "serial_number": "70153443448884702681996102271549704759327537151", | ||||
|                 "subject": "CN=client", | ||||
|             }, | ||||
|         ) | ||||
| @ -1,5 +0,0 @@ | ||||
| """API URLs""" | ||||
|  | ||||
| from authentik.enterprise.stages.mtls.api import MutualTLSStageViewSet | ||||
|  | ||||
| api_urlpatterns = [("stages/mtls", MutualTLSStageViewSet)] | ||||
| @ -1,18 +0,0 @@ | ||||
| # Generated by Django 5.1.9 on 2025-05-27 12:52 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("authentik_flows", "0027_auto_20231028_1424"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name="flowtoken", | ||||
|             name="revoke_on_execution", | ||||
|             field=models.BooleanField(default=True), | ||||
|         ), | ||||
|     ] | ||||
| @ -303,10 +303,9 @@ class FlowToken(Token): | ||||
|  | ||||
|     flow = models.ForeignKey(Flow, on_delete=models.CASCADE) | ||||
|     _plan = models.TextField() | ||||
|     revoke_on_execution = models.BooleanField(default=True) | ||||
|  | ||||
|     @staticmethod | ||||
|     def pickle(plan: "FlowPlan") -> str: | ||||
|     def pickle(plan) -> str: | ||||
|         """Pickle into string""" | ||||
|         data = dumps(plan) | ||||
|         return b64encode(data).decode() | ||||
|  | ||||
| @ -99,10 +99,9 @@ class ChallengeStageView(StageView): | ||||
|             self.logger.debug("Got StageInvalidException", exc=exc) | ||||
|             return self.executor.stage_invalid() | ||||
|         if not challenge.is_valid(): | ||||
|             self.logger.error( | ||||
|             self.logger.warning( | ||||
|                 "f(ch): Invalid challenge", | ||||
|                 errors=challenge.errors, | ||||
|                 challenge=challenge.data, | ||||
|             ) | ||||
|         return HttpChallengeResponse(challenge) | ||||
|  | ||||
|  | ||||
| @ -1,10 +1,7 @@ | ||||
| """Test helpers""" | ||||
|  | ||||
| from collections.abc import Callable, Generator | ||||
| from contextlib import contextmanager | ||||
| from json import loads | ||||
| from typing import Any | ||||
| from unittest.mock import MagicMock, patch | ||||
|  | ||||
| from django.http.response import HttpResponse | ||||
| from django.urls.base import reverse | ||||
| @ -12,8 +9,6 @@ from rest_framework.test import APITestCase | ||||
|  | ||||
| from authentik.core.models import User | ||||
| from authentik.flows.models import Flow | ||||
| from authentik.flows.planner import FlowPlan | ||||
| from authentik.flows.views.executor import SESSION_KEY_PLAN | ||||
|  | ||||
|  | ||||
| class FlowTestCase(APITestCase): | ||||
| @ -49,12 +44,3 @@ class FlowTestCase(APITestCase): | ||||
|     def assertStageRedirects(self, response: HttpResponse, to: str) -> dict[str, Any]: | ||||
|         """Wrapper around assertStageResponse that checks for a redirect""" | ||||
|         return self.assertStageResponse(response, component="xak-flow-redirect", to=to) | ||||
|  | ||||
|     @contextmanager | ||||
|     def assertFlowFinishes(self) -> Generator[Callable[[], FlowPlan]]: | ||||
|         """Capture the flow plan before the flow finishes and return it""" | ||||
|         try: | ||||
|             with patch("authentik.flows.views.executor.FlowExecutorView.cancel", MagicMock()): | ||||
|                 yield lambda: self.client.session.get(SESSION_KEY_PLAN) | ||||
|         finally: | ||||
|             pass | ||||
|  | ||||
| @ -146,8 +146,7 @@ class FlowExecutorView(APIView): | ||||
|         except (AttributeError, EOFError, ImportError, IndexError) as exc: | ||||
|             LOGGER.warning("f(exec): Failed to restore token plan", exc=exc) | ||||
|         finally: | ||||
|             if token.revoke_on_execution: | ||||
|                 token.delete() | ||||
|             token.delete() | ||||
|         if not isinstance(plan, FlowPlan): | ||||
|             return None | ||||
|         plan.context[PLAN_CONTEXT_IS_RESTORED] = token | ||||
|  | ||||
| @ -23,6 +23,7 @@ if TYPE_CHECKING: | ||||
|  | ||||
|  | ||||
| class Direction(StrEnum): | ||||
|  | ||||
|     add = "add" | ||||
|     remove = "remove" | ||||
|  | ||||
| @ -36,16 +37,13 @@ SAFE_METHODS = [ | ||||
|  | ||||
|  | ||||
| class BaseOutgoingSyncClient[ | ||||
|     TModel: "Model", | ||||
|     TConnection: "Model", | ||||
|     TSchema: dict, | ||||
|     TProvider: "OutgoingSyncProvider", | ||||
|     TModel: "Model", TConnection: "Model", TSchema: dict, TProvider: "OutgoingSyncProvider" | ||||
| ]: | ||||
|     """Basic Outgoing sync client Client""" | ||||
|  | ||||
|     provider: TProvider | ||||
|     connection_type: type[TConnection] | ||||
|     connection_attr: str | ||||
|     connection_type_query: str | ||||
|     mapper: PropertyMappingManager | ||||
|  | ||||
|     can_discover = False | ||||
| @ -65,7 +63,9 @@ class BaseOutgoingSyncClient[ | ||||
|     def write(self, obj: TModel) -> tuple[TConnection, bool]: | ||||
|         """Write object to destination. Uses self.create and self.update, but | ||||
|         can be overwritten for further logic""" | ||||
|         connection = getattr(obj, self.connection_attr).filter(provider=self.provider).first() | ||||
|         connection = self.connection_type.objects.filter( | ||||
|             provider=self.provider, **{self.connection_type_query: obj} | ||||
|         ).first() | ||||
|         try: | ||||
|             if not connection: | ||||
|                 connection = self.create(obj) | ||||
|  | ||||
| @ -1,7 +1,6 @@ | ||||
| from collections.abc import Callable | ||||
| from dataclasses import asdict | ||||
|  | ||||
| from celery import group | ||||
| from celery.exceptions import Retry | ||||
| from celery.result import allow_join_result | ||||
| from django.core.paginator import Paginator | ||||
| @ -83,41 +82,21 @@ class SyncTasks: | ||||
|                 self.logger.debug("Failed to acquire sync lock, skipping", provider=provider.name) | ||||
|                 return | ||||
|             try: | ||||
|                 messages.append(_("Syncing users")) | ||||
|                 user_results = ( | ||||
|                     group( | ||||
|                         [ | ||||
|                             sync_objects.signature( | ||||
|                                 args=(class_to_path(User), page, provider_pk), | ||||
|                                 time_limit=PAGE_TIMEOUT, | ||||
|                                 soft_time_limit=PAGE_TIMEOUT, | ||||
|                             ) | ||||
|                             for page in users_paginator.page_range | ||||
|                         ] | ||||
|                     ) | ||||
|                     .apply_async() | ||||
|                     .get() | ||||
|                 ) | ||||
|                 for result in user_results: | ||||
|                     for msg in result: | ||||
|                 for page in users_paginator.page_range: | ||||
|                     messages.append(_("Syncing page {page} of users".format(page=page))) | ||||
|                     for msg in sync_objects.apply_async( | ||||
|                         args=(class_to_path(User), page, provider_pk), | ||||
|                         time_limit=PAGE_TIMEOUT, | ||||
|                         soft_time_limit=PAGE_TIMEOUT, | ||||
|                     ).get(): | ||||
|                         messages.append(LogEvent(**msg)) | ||||
|                 messages.append(_("Syncing groups")) | ||||
|                 group_results = ( | ||||
|                     group( | ||||
|                         [ | ||||
|                             sync_objects.signature( | ||||
|                                 args=(class_to_path(Group), page, provider_pk), | ||||
|                                 time_limit=PAGE_TIMEOUT, | ||||
|                                 soft_time_limit=PAGE_TIMEOUT, | ||||
|                             ) | ||||
|                             for page in groups_paginator.page_range | ||||
|                         ] | ||||
|                     ) | ||||
|                     .apply_async() | ||||
|                     .get() | ||||
|                 ) | ||||
|                 for result in group_results: | ||||
|                     for msg in result: | ||||
|                 for page in groups_paginator.page_range: | ||||
|                     messages.append(_("Syncing page {page} of groups".format(page=page))) | ||||
|                     for msg in sync_objects.apply_async( | ||||
|                         args=(class_to_path(Group), page, provider_pk), | ||||
|                         time_limit=PAGE_TIMEOUT, | ||||
|                         soft_time_limit=PAGE_TIMEOUT, | ||||
|                     ).get(): | ||||
|                         messages.append(LogEvent(**msg)) | ||||
|             except TransientSyncException as exc: | ||||
|                 self.logger.warning("transient sync exception", exc=exc) | ||||
| @ -153,15 +132,6 @@ class SyncTasks: | ||||
|             self.logger.debug("starting discover") | ||||
|             client.discover() | ||||
|         self.logger.debug("starting sync for page", page=page) | ||||
|         messages.append( | ||||
|             asdict( | ||||
|                 LogEvent( | ||||
|                     _("Syncing page {page} of groups".format(page=page)), | ||||
|                     log_level="info", | ||||
|                     logger=f"{provider._meta.verbose_name}@{object_type}", | ||||
|                 ) | ||||
|             ) | ||||
|         ) | ||||
|         for obj in paginator.page(page).object_list: | ||||
|             obj: Model | ||||
|             try: | ||||
|  | ||||
| @ -1,11 +1,9 @@ | ||||
| """Websocket tests""" | ||||
|  | ||||
| from dataclasses import asdict | ||||
| from unittest.mock import patch | ||||
|  | ||||
| from channels.routing import URLRouter | ||||
| from channels.testing import WebsocketCommunicator | ||||
| from django.contrib.contenttypes.models import ContentType | ||||
| from django.test import TransactionTestCase | ||||
|  | ||||
| from authentik import __version__ | ||||
| @ -16,12 +14,6 @@ from authentik.providers.proxy.models import ProxyProvider | ||||
| from authentik.root import websocket | ||||
|  | ||||
|  | ||||
| def patched__get_ct_cached(app_label, codename): | ||||
|     """Caches `ContentType` instances like its `QuerySet` does.""" | ||||
|     return ContentType.objects.get(app_label=app_label, permission__codename=codename) | ||||
|  | ||||
|  | ||||
| @patch("guardian.shortcuts._get_ct_cached", patched__get_ct_cached) | ||||
| class TestOutpostWS(TransactionTestCase): | ||||
|     """Websocket tests""" | ||||
|  | ||||
| @ -46,7 +38,6 @@ class TestOutpostWS(TransactionTestCase): | ||||
|         ) | ||||
|         connected, _ = await communicator.connect() | ||||
|         self.assertFalse(connected) | ||||
|         await communicator.disconnect() | ||||
|  | ||||
|     async def test_auth_valid(self): | ||||
|         """Test auth with token""" | ||||
| @ -57,7 +48,6 @@ class TestOutpostWS(TransactionTestCase): | ||||
|         ) | ||||
|         connected, _ = await communicator.connect() | ||||
|         self.assertTrue(connected) | ||||
|         await communicator.disconnect() | ||||
|  | ||||
|     async def test_send(self): | ||||
|         """Test sending of Hello""" | ||||
|  | ||||
| @ -7,8 +7,10 @@ from django.db import migrations | ||||
|  | ||||
|  | ||||
| def migrate_search_group(apps: Apps, schema_editor: BaseDatabaseSchemaEditor): | ||||
|     from authentik.core.models import User | ||||
|     from django.apps import apps as real_apps | ||||
|     from django.contrib.auth.management import create_permissions | ||||
|     from guardian.shortcuts import UserObjectPermission | ||||
|  | ||||
|     db_alias = schema_editor.connection.alias | ||||
|  | ||||
|  | ||||
| @ -50,4 +50,3 @@ AMR_PASSWORD = "pwd"  # nosec | ||||
| AMR_MFA = "mfa" | ||||
| AMR_OTP = "otp" | ||||
| AMR_WEBAUTHN = "user" | ||||
| AMR_SMART_CARD = "sc" | ||||
|  | ||||
| @ -16,7 +16,6 @@ from authentik.providers.oauth2.constants import ( | ||||
|     ACR_AUTHENTIK_DEFAULT, | ||||
|     AMR_MFA, | ||||
|     AMR_PASSWORD, | ||||
|     AMR_SMART_CARD, | ||||
|     AMR_WEBAUTHN, | ||||
| ) | ||||
| from authentik.stages.password.stage import PLAN_CONTEXT_METHOD, PLAN_CONTEXT_METHOD_ARGS | ||||
| @ -140,10 +139,9 @@ class IDToken: | ||||
|                 amr.append(AMR_PASSWORD) | ||||
|             if method == "auth_webauthn_pwl": | ||||
|                 amr.append(AMR_WEBAUTHN) | ||||
|             if "certificate" in method_args: | ||||
|                 amr.append(AMR_SMART_CARD) | ||||
|             if "mfa_devices" in method_args: | ||||
|                 amr.append(AMR_MFA) | ||||
|                 if len(amr) > 0: | ||||
|                     amr.append(AMR_MFA) | ||||
|             if amr: | ||||
|                 id_token.amr = amr | ||||
|  | ||||
|  | ||||
| @ -47,8 +47,6 @@ class IngressReconciler(KubernetesObjectReconciler[V1Ingress]): | ||||
|     def reconcile(self, current: V1Ingress, reference: V1Ingress): | ||||
|         super().reconcile(current, reference) | ||||
|         self._check_annotations(current, reference) | ||||
|         if current.spec.ingress_class_name != reference.spec.ingress_class_name: | ||||
|             raise NeedsUpdate() | ||||
|         # Create a list of all expected host and tls hosts | ||||
|         expected_hosts = [] | ||||
|         expected_hosts_tls = [] | ||||
|  | ||||
| @ -34,7 +34,7 @@ class SCIMGroupClient(SCIMClient[Group, SCIMProviderGroup, SCIMGroupSchema]): | ||||
|     """SCIM client for groups""" | ||||
|  | ||||
|     connection_type = SCIMProviderGroup | ||||
|     connection_attr = "scimprovidergroup_set" | ||||
|     connection_type_query = "group" | ||||
|     mapper: PropertyMappingManager | ||||
|  | ||||
|     def __init__(self, provider: SCIMProvider): | ||||
|  | ||||
| @ -18,7 +18,7 @@ class SCIMUserClient(SCIMClient[User, SCIMProviderUser, SCIMUserSchema]): | ||||
|     """SCIM client for users""" | ||||
|  | ||||
|     connection_type = SCIMProviderUser | ||||
|     connection_attr = "scimprovideruser_set" | ||||
|     connection_type_query = "user" | ||||
|     mapper: PropertyMappingManager | ||||
|  | ||||
|     def __init__(self, provider: SCIMProvider): | ||||
|  | ||||
| @ -116,7 +116,7 @@ class SCIMProvider(OutgoingSyncProvider, BackchannelProvider): | ||||
|         if type == User: | ||||
|             # Get queryset of all users with consistent ordering | ||||
|             # according to the provider's settings | ||||
|             base = User.objects.prefetch_related("scimprovideruser_set").all().exclude_anonymous() | ||||
|             base = User.objects.all().exclude_anonymous() | ||||
|             if self.exclude_users_service_account: | ||||
|                 base = base.exclude(type=UserTypes.SERVICE_ACCOUNT).exclude( | ||||
|                     type=UserTypes.INTERNAL_SERVICE_ACCOUNT | ||||
| @ -126,7 +126,7 @@ class SCIMProvider(OutgoingSyncProvider, BackchannelProvider): | ||||
|             return base.order_by("pk") | ||||
|         if type == Group: | ||||
|             # Get queryset of all groups with consistent ordering | ||||
|             return Group.objects.prefetch_related("scimprovidergroup_set").all().order_by("pk") | ||||
|             return Group.objects.all().order_by("pk") | ||||
|         raise ValueError(f"Invalid type {type}") | ||||
|  | ||||
|     @property | ||||
|  | ||||
| @ -384,7 +384,7 @@ class SCIMUserTests(TestCase): | ||||
|                 self.assertIn(request.method, SAFE_METHODS) | ||||
|         task = SystemTask.objects.filter(uid=slugify(self.provider.name)).first() | ||||
|         self.assertIsNotNone(task) | ||||
|         drop_msg = task.messages[3] | ||||
|         drop_msg = task.messages[2] | ||||
|         self.assertEqual(drop_msg["event"], "Dropping mutating request due to dry run") | ||||
|         self.assertIsNotNone(drop_msg["attributes"]["url"]) | ||||
|         self.assertIsNotNone(drop_msg["attributes"]["body"]) | ||||
|  | ||||
| @ -73,7 +73,6 @@ TENANT_APPS = [ | ||||
|     "authentik.admin", | ||||
|     "authentik.api", | ||||
|     "authentik.crypto", | ||||
|     "authentik.endpoints", | ||||
|     "authentik.flows", | ||||
|     "authentik.outposts", | ||||
|     "authentik.policies.dummy", | ||||
| @ -133,7 +132,7 @@ TENANT_CREATION_FAKES_MIGRATIONS = True | ||||
| TENANT_BASE_SCHEMA = "template" | ||||
| PUBLIC_SCHEMA_NAME = CONFIG.get("postgresql.default_schema") | ||||
|  | ||||
| GUARDIAN_MONKEY_PATCH_USER = False | ||||
| GUARDIAN_MONKEY_PATCH = False | ||||
|  | ||||
| SPECTACULAR_SETTINGS = { | ||||
|     "TITLE": "authentik", | ||||
| @ -425,7 +424,7 @@ else: | ||||
|         "BACKEND": "authentik.root.storages.FileStorage", | ||||
|         "OPTIONS": { | ||||
|             "location": Path(CONFIG.get("storage.media.file.path")), | ||||
|             "base_url": CONFIG.get("web.path", "/") + "media/", | ||||
|             "base_url": "/media/", | ||||
|         }, | ||||
|     } | ||||
|     # Compatibility for apps not supporting top-level STORAGES | ||||
|  | ||||
| @ -31,8 +31,6 @@ class PytestTestRunner(DiscoverRunner):  # pragma: no cover | ||||
|  | ||||
|         if kwargs.get("randomly_seed", None): | ||||
|             self.args.append(f"--randomly-seed={kwargs['randomly_seed']}") | ||||
|         if kwargs.get("no_capture", False): | ||||
|             self.args.append("--capture=no") | ||||
|  | ||||
|         settings.TEST = True | ||||
|         settings.CELERY["task_always_eager"] = True | ||||
| @ -66,11 +64,6 @@ class PytestTestRunner(DiscoverRunner):  # pragma: no cover | ||||
|             "Default behaviour: use random.Random().getrandbits(32), so the seed is" | ||||
|             "different on each run.", | ||||
|         ) | ||||
|         parser.add_argument( | ||||
|             "--no-capture", | ||||
|             action="store_true", | ||||
|             help="Disable any capturing of stdout/stderr during tests.", | ||||
|         ) | ||||
|  | ||||
|     def run_tests(self, test_labels, extra_tests=None, **kwargs): | ||||
|         """Run pytest and return the exitcode. | ||||
|  | ||||
| @ -317,7 +317,7 @@ class KerberosSource(Source): | ||||
|                 usage="accept", name=name, store=self.get_gssapi_store() | ||||
|             ) | ||||
|         except gssapi.exceptions.GSSError as exc: | ||||
|             LOGGER.warning("GSSAPI credentials failure", exc=exc) | ||||
|             LOGGER.warn("GSSAPI credentials failure", exc=exc) | ||||
|             return None | ||||
|  | ||||
|  | ||||
|  | ||||
| @ -111,7 +111,6 @@ class LDAPSourceSerializer(SourceSerializer): | ||||
|             "sync_parent_group", | ||||
|             "connectivity", | ||||
|             "lookup_groups_from_user", | ||||
|             "delete_not_found_objects", | ||||
|         ] | ||||
|         extra_kwargs = {"bind_password": {"write_only": True}} | ||||
|  | ||||
| @ -148,7 +147,6 @@ class LDAPSourceViewSet(UsedByMixin, ModelViewSet): | ||||
|         "user_property_mappings", | ||||
|         "group_property_mappings", | ||||
|         "lookup_groups_from_user", | ||||
|         "delete_not_found_objects", | ||||
|     ] | ||||
|     search_fields = ["name", "slug"] | ||||
|     ordering = ["name"] | ||||
|  | ||||
| @ -1,48 +0,0 @@ | ||||
| # Generated by Django 5.1.9 on 2025-05-28 08:15 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("authentik_core", "0048_delete_oldauthenticatedsession_content_type"), | ||||
|         ("authentik_sources_ldap", "0008_groupldapsourceconnection_userldapsourceconnection"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name="groupldapsourceconnection", | ||||
|             name="validated_by", | ||||
|             field=models.UUIDField( | ||||
|                 blank=True, | ||||
|                 help_text="Unique ID used while checking if this object still exists in the directory.", | ||||
|                 null=True, | ||||
|             ), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name="ldapsource", | ||||
|             name="delete_not_found_objects", | ||||
|             field=models.BooleanField( | ||||
|                 default=False, | ||||
|                 help_text="Delete authentik users and groups which were previously supplied by this source, but are now missing from it.", | ||||
|             ), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name="userldapsourceconnection", | ||||
|             name="validated_by", | ||||
|             field=models.UUIDField( | ||||
|                 blank=True, | ||||
|                 help_text="Unique ID used while checking if this object still exists in the directory.", | ||||
|                 null=True, | ||||
|             ), | ||||
|         ), | ||||
|         migrations.AddIndex( | ||||
|             model_name="groupldapsourceconnection", | ||||
|             index=models.Index(fields=["validated_by"], name="authentik_s_validat_b70447_idx"), | ||||
|         ), | ||||
|         migrations.AddIndex( | ||||
|             model_name="userldapsourceconnection", | ||||
|             index=models.Index(fields=["validated_by"], name="authentik_s_validat_ff2ebc_idx"), | ||||
|         ), | ||||
|     ] | ||||
| @ -137,14 +137,6 @@ class LDAPSource(Source): | ||||
|         ), | ||||
|     ) | ||||
|  | ||||
|     delete_not_found_objects = models.BooleanField( | ||||
|         default=False, | ||||
|         help_text=_( | ||||
|             "Delete authentik users and groups which were previously supplied by this source, " | ||||
|             "but are now missing from it." | ||||
|         ), | ||||
|     ) | ||||
|  | ||||
|     @property | ||||
|     def component(self) -> str: | ||||
|         return "ak-source-ldap-form" | ||||
| @ -329,12 +321,6 @@ class LDAPSourcePropertyMapping(PropertyMapping): | ||||
|  | ||||
|  | ||||
| class UserLDAPSourceConnection(UserSourceConnection): | ||||
|     validated_by = models.UUIDField( | ||||
|         null=True, | ||||
|         blank=True, | ||||
|         help_text=_("Unique ID used while checking if this object still exists in the directory."), | ||||
|     ) | ||||
|  | ||||
|     @property | ||||
|     def serializer(self) -> type[Serializer]: | ||||
|         from authentik.sources.ldap.api import ( | ||||
| @ -346,18 +332,9 @@ class UserLDAPSourceConnection(UserSourceConnection): | ||||
|     class Meta: | ||||
|         verbose_name = _("User LDAP Source Connection") | ||||
|         verbose_name_plural = _("User LDAP Source Connections") | ||||
|         indexes = [ | ||||
|             models.Index(fields=["validated_by"]), | ||||
|         ] | ||||
|  | ||||
|  | ||||
| class GroupLDAPSourceConnection(GroupSourceConnection): | ||||
|     validated_by = models.UUIDField( | ||||
|         null=True, | ||||
|         blank=True, | ||||
|         help_text=_("Unique ID used while checking if this object still exists in the directory."), | ||||
|     ) | ||||
|  | ||||
|     @property | ||||
|     def serializer(self) -> type[Serializer]: | ||||
|         from authentik.sources.ldap.api import ( | ||||
| @ -369,6 +346,3 @@ class GroupLDAPSourceConnection(GroupSourceConnection): | ||||
|     class Meta: | ||||
|         verbose_name = _("Group LDAP Source Connection") | ||||
|         verbose_name_plural = _("Group LDAP Source Connections") | ||||
|         indexes = [ | ||||
|             models.Index(fields=["validated_by"]), | ||||
|         ] | ||||
|  | ||||
| @ -9,7 +9,7 @@ from structlog.stdlib import BoundLogger, get_logger | ||||
| from authentik.core.sources.mapper import SourceMapper | ||||
| from authentik.lib.config import CONFIG | ||||
| from authentik.lib.sync.mapper import PropertyMappingManager | ||||
| from authentik.sources.ldap.models import LDAPSource, flatten | ||||
| from authentik.sources.ldap.models import LDAPSource | ||||
|  | ||||
|  | ||||
| class BaseLDAPSynchronizer: | ||||
| @ -77,16 +77,6 @@ class BaseLDAPSynchronizer: | ||||
|         """Get objects from LDAP, implemented in subclass""" | ||||
|         raise NotImplementedError() | ||||
|  | ||||
|     def get_attributes(self, object): | ||||
|         if "attributes" not in object: | ||||
|             return | ||||
|         return object.get("attributes", {}) | ||||
|  | ||||
|     def get_identifier(self, attributes: dict): | ||||
|         if not attributes.get(self._source.object_uniqueness_field): | ||||
|             return | ||||
|         return flatten(attributes[self._source.object_uniqueness_field]) | ||||
|  | ||||
|     def search_paginator(  # noqa: PLR0913 | ||||
|         self, | ||||
|         search_base, | ||||
|  | ||||
| @ -1,61 +0,0 @@ | ||||
| from collections.abc import Generator | ||||
| from itertools import batched | ||||
| from uuid import uuid4 | ||||
|  | ||||
| from ldap3 import SUBTREE | ||||
|  | ||||
| from authentik.core.models import Group | ||||
| from authentik.sources.ldap.models import GroupLDAPSourceConnection | ||||
| from authentik.sources.ldap.sync.base import BaseLDAPSynchronizer | ||||
| from authentik.sources.ldap.sync.forward_delete_users import DELETE_CHUNK_SIZE, UPDATE_CHUNK_SIZE | ||||
|  | ||||
|  | ||||
| class GroupLDAPForwardDeletion(BaseLDAPSynchronizer): | ||||
|     """Delete LDAP Groups from authentik""" | ||||
|  | ||||
|     @staticmethod | ||||
|     def name() -> str: | ||||
|         return "group_deletions" | ||||
|  | ||||
|     def get_objects(self, **kwargs) -> Generator: | ||||
|         if not self._source.sync_groups or not self._source.delete_not_found_objects: | ||||
|             self.message("Group syncing is disabled for this Source") | ||||
|             return iter(()) | ||||
|  | ||||
|         uuid = uuid4() | ||||
|         groups = self._source.connection().extend.standard.paged_search( | ||||
|             search_base=self.base_dn_groups, | ||||
|             search_filter=self._source.group_object_filter, | ||||
|             search_scope=SUBTREE, | ||||
|             attributes=[self._source.object_uniqueness_field], | ||||
|             generator=True, | ||||
|             **kwargs, | ||||
|         ) | ||||
|         for batch in batched(groups, UPDATE_CHUNK_SIZE, strict=False): | ||||
|             identifiers = [] | ||||
|             for group in batch: | ||||
|                 if not (attributes := self.get_attributes(group)): | ||||
|                     continue | ||||
|                 if identifier := self.get_identifier(attributes): | ||||
|                     identifiers.append(identifier) | ||||
|             GroupLDAPSourceConnection.objects.filter(identifier__in=identifiers).update( | ||||
|                 validated_by=uuid | ||||
|             ) | ||||
|  | ||||
|         return batched( | ||||
|             GroupLDAPSourceConnection.objects.filter(source=self._source) | ||||
|             .exclude(validated_by=uuid) | ||||
|             .values_list("group", flat=True) | ||||
|             .iterator(chunk_size=DELETE_CHUNK_SIZE), | ||||
|             DELETE_CHUNK_SIZE, | ||||
|             strict=False, | ||||
|         ) | ||||
|  | ||||
|     def sync(self, group_pks: tuple) -> int: | ||||
|         """Delete authentik groups""" | ||||
|         if not self._source.sync_groups or not self._source.delete_not_found_objects: | ||||
|             self.message("Group syncing is disabled for this Source") | ||||
|             return -1 | ||||
|         self._logger.debug("Deleting groups", group_pks=group_pks) | ||||
|         _, deleted_per_type = Group.objects.filter(pk__in=group_pks).delete() | ||||
|         return deleted_per_type.get(Group._meta.label, 0) | ||||
| @ -1,63 +0,0 @@ | ||||
| from collections.abc import Generator | ||||
| from itertools import batched | ||||
| from uuid import uuid4 | ||||
|  | ||||
| from ldap3 import SUBTREE | ||||
|  | ||||
| from authentik.core.models import User | ||||
| from authentik.sources.ldap.models import UserLDAPSourceConnection | ||||
| from authentik.sources.ldap.sync.base import BaseLDAPSynchronizer | ||||
|  | ||||
| UPDATE_CHUNK_SIZE = 10_000 | ||||
| DELETE_CHUNK_SIZE = 50 | ||||
|  | ||||
|  | ||||
| class UserLDAPForwardDeletion(BaseLDAPSynchronizer): | ||||
|     """Delete LDAP Users from authentik""" | ||||
|  | ||||
|     @staticmethod | ||||
|     def name() -> str: | ||||
|         return "user_deletions" | ||||
|  | ||||
|     def get_objects(self, **kwargs) -> Generator: | ||||
|         if not self._source.sync_users or not self._source.delete_not_found_objects: | ||||
|             self.message("User syncing is disabled for this Source") | ||||
|             return iter(()) | ||||
|  | ||||
|         uuid = uuid4() | ||||
|         users = self._source.connection().extend.standard.paged_search( | ||||
|             search_base=self.base_dn_users, | ||||
|             search_filter=self._source.user_object_filter, | ||||
|             search_scope=SUBTREE, | ||||
|             attributes=[self._source.object_uniqueness_field], | ||||
|             generator=True, | ||||
|             **kwargs, | ||||
|         ) | ||||
|         for batch in batched(users, UPDATE_CHUNK_SIZE, strict=False): | ||||
|             identifiers = [] | ||||
|             for user in batch: | ||||
|                 if not (attributes := self.get_attributes(user)): | ||||
|                     continue | ||||
|                 if identifier := self.get_identifier(attributes): | ||||
|                     identifiers.append(identifier) | ||||
|             UserLDAPSourceConnection.objects.filter(identifier__in=identifiers).update( | ||||
|                 validated_by=uuid | ||||
|             ) | ||||
|  | ||||
|         return batched( | ||||
|             UserLDAPSourceConnection.objects.filter(source=self._source) | ||||
|             .exclude(validated_by=uuid) | ||||
|             .values_list("user", flat=True) | ||||
|             .iterator(chunk_size=DELETE_CHUNK_SIZE), | ||||
|             DELETE_CHUNK_SIZE, | ||||
|             strict=False, | ||||
|         ) | ||||
|  | ||||
|     def sync(self, user_pks: tuple) -> int: | ||||
|         """Delete authentik users""" | ||||
|         if not self._source.sync_users or not self._source.delete_not_found_objects: | ||||
|             self.message("User syncing is disabled for this Source") | ||||
|             return -1 | ||||
|         self._logger.debug("Deleting users", user_pks=user_pks) | ||||
|         _, deleted_per_type = User.objects.filter(pk__in=user_pks).delete() | ||||
|         return deleted_per_type.get(User._meta.label, 0) | ||||
| @ -58,16 +58,18 @@ class GroupLDAPSynchronizer(BaseLDAPSynchronizer): | ||||
|             return -1 | ||||
|         group_count = 0 | ||||
|         for group in page_data: | ||||
|             if (attributes := self.get_attributes(group)) is None: | ||||
|             if "attributes" not in group: | ||||
|                 continue | ||||
|             attributes = group.get("attributes", {}) | ||||
|             group_dn = flatten(flatten(group.get("entryDN", group.get("dn")))) | ||||
|             if not (uniq := self.get_identifier(attributes)): | ||||
|             if not attributes.get(self._source.object_uniqueness_field): | ||||
|                 self.message( | ||||
|                     f"Uniqueness field not found/not set in attributes: '{group_dn}'", | ||||
|                     attributes=attributes.keys(), | ||||
|                     dn=group_dn, | ||||
|                 ) | ||||
|                 continue | ||||
|             uniq = flatten(attributes[self._source.object_uniqueness_field]) | ||||
|             try: | ||||
|                 defaults = { | ||||
|                     k: flatten(v) | ||||
|  | ||||
| @ -63,9 +63,9 @@ class MembershipLDAPSynchronizer(BaseLDAPSynchronizer): | ||||
|                     group_member_dn = group_member.get("dn", {}) | ||||
|                     members.append(group_member_dn) | ||||
|             else: | ||||
|                 if (attributes := self.get_attributes(group)) is None: | ||||
|                 if "attributes" not in group: | ||||
|                     continue | ||||
|                 members = attributes.get(self._source.group_membership_field, []) | ||||
|                 members = group.get("attributes", {}).get(self._source.group_membership_field, []) | ||||
|  | ||||
|             ak_group = self.get_group(group) | ||||
|             if not ak_group: | ||||
|  | ||||
| @ -60,16 +60,18 @@ class UserLDAPSynchronizer(BaseLDAPSynchronizer): | ||||
|             return -1 | ||||
|         user_count = 0 | ||||
|         for user in page_data: | ||||
|             if (attributes := self.get_attributes(user)) is None: | ||||
|             if "attributes" not in user: | ||||
|                 continue | ||||
|             attributes = user.get("attributes", {}) | ||||
|             user_dn = flatten(user.get("entryDN", user.get("dn"))) | ||||
|             if not (uniq := self.get_identifier(attributes)): | ||||
|             if not attributes.get(self._source.object_uniqueness_field): | ||||
|                 self.message( | ||||
|                     f"Uniqueness field not found/not set in attributes: '{user_dn}'", | ||||
|                     attributes=attributes.keys(), | ||||
|                     dn=user_dn, | ||||
|                 ) | ||||
|                 continue | ||||
|             uniq = flatten(attributes[self._source.object_uniqueness_field]) | ||||
|             try: | ||||
|                 defaults = { | ||||
|                     k: flatten(v) | ||||
|  | ||||
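
Both the group and user synchronizers above resolve an entry's connection identifier by reading the configured uniqueness attribute and flattening it, skipping entries where the attribute is missing or empty. A minimal sketch of that lookup with a simplified stand-in flatten helper (illustrative, not the authentik implementation):

def flatten(value):
    # ldap3 tends to return attribute values as lists, even for single values.
    if isinstance(value, list):
        return flatten(value[0]) if value else ""
    return value

def get_identifier(attributes: dict, uniqueness_field: str) -> str | None:
    raw = attributes.get(uniqueness_field)
    if not raw:
        return None  # caller logs "Uniqueness field not found/not set" and skips the entry
    return flatten(raw)

# Usage with an ldap3-style entry:
entry = {"attributes": {"uid": ["jdoe"], "cn": ["John Doe"]}}
assert get_identifier(entry["attributes"], "uid") == "jdoe"
assert get_identifier(entry["attributes"], "employeeNumber") is None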
| @ -17,8 +17,6 @@ from authentik.lib.utils.reflection import class_to_path, path_to_class | ||||
| from authentik.root.celery import CELERY_APP | ||||
| from authentik.sources.ldap.models import LDAPSource | ||||
| from authentik.sources.ldap.sync.base import BaseLDAPSynchronizer | ||||
| from authentik.sources.ldap.sync.forward_delete_groups import GroupLDAPForwardDeletion | ||||
| from authentik.sources.ldap.sync.forward_delete_users import UserLDAPForwardDeletion | ||||
| from authentik.sources.ldap.sync.groups import GroupLDAPSynchronizer | ||||
| from authentik.sources.ldap.sync.membership import MembershipLDAPSynchronizer | ||||
| from authentik.sources.ldap.sync.users import UserLDAPSynchronizer | ||||
| @ -54,11 +52,11 @@ def ldap_connectivity_check(pk: str | None = None): | ||||
|  | ||||
|  | ||||
| @CELERY_APP.task( | ||||
|     # We take the configured hours timeout time by 3.5 as we run user and | ||||
|     # group in parallel and then membership, then deletions, so 3x is to cover the serial tasks, | ||||
|     # We take the configured hours timeout time by 2.5 as we run user and | ||||
|     # group in parallel and then membership, so 2x is to cover the serial tasks, | ||||
|     # and 0.5x on top of that to give some more leeway | ||||
|     soft_time_limit=(60 * 60 * CONFIG.get_int("ldap.task_timeout_hours")) * 3.5, | ||||
|     task_time_limit=(60 * 60 * CONFIG.get_int("ldap.task_timeout_hours")) * 3.5, | ||||
|     soft_time_limit=(60 * 60 * CONFIG.get_int("ldap.task_timeout_hours")) * 2.5, | ||||
|     task_time_limit=(60 * 60 * CONFIG.get_int("ldap.task_timeout_hours")) * 2.5, | ||||
| ) | ||||
| def ldap_sync_single(source_pk: str): | ||||
|     """Sync a single source""" | ||||
| @ -81,25 +79,6 @@ def ldap_sync_single(source_pk: str): | ||||
|             group( | ||||
|                 ldap_sync_paginator(source, MembershipLDAPSynchronizer), | ||||
|             ), | ||||
|             # Finally, deletions. What we'd really like to do here is something like | ||||
|             # ``` | ||||
|             # user_identifiers = <ldap query> | ||||
|             # User.objects.exclude( | ||||
|             #     usersourceconnection__identifier__in=user_uniqueness_identifiers, | ||||
|             # ).delete() | ||||
|             # ``` | ||||
|             # This runs into performance issues in large installations. So instead we spread the | ||||
|             # work out into three steps: | ||||
|             # 1. Get every object from the LDAP source. | ||||
|             # 2. Mark every object as "safe" in the database. This is quick, but any error could | ||||
|             #    mean deleting users which should not be deleted, so we do it immediately, in | ||||
|             #    large chunks, and only queue the deletion step afterwards. | ||||
|             # 3. Delete every unmarked item. This is slow, so we spread it over many tasks in | ||||
|             #    small chunks. | ||||
|             group( | ||||
|                 ldap_sync_paginator(source, UserLDAPForwardDeletion) | ||||
|                 + ldap_sync_paginator(source, GroupLDAPForwardDeletion), | ||||
|             ), | ||||
|         ) | ||||
|         task() | ||||
|  | ||||
|  | ||||
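The comments on the task decorator above spell out the time-limit arithmetic: with the deletion phase in the pipeline the limit is 3.5x the configured hour budget (user and group sync run in parallel, then membership, then deletions, plus 0.5x leeway), and without it the multiplier drops to 2.5x. A small worked example, assuming ldap.task_timeout_hours is set to 2:

task_timeout_hours = 2  # assumed value of CONFIG.get_int("ldap.task_timeout_hours")

# user + group (parallel), then membership, then deletions, plus 0.5x leeway
limit_with_deletions = 60 * 60 * task_timeout_hours * 3.5     # 25200 s, i.e. 7 hours
# user + group (parallel), then membership, plus 0.5x leeway
limit_without_deletions = 60 * 60 * task_timeout_hours * 2.5  # 18000 s, i.e. 5 hours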
| @ -2,33 +2,6 @@ | ||||
|  | ||||
| from ldap3 import MOCK_SYNC, OFFLINE_SLAPD_2_4, Connection, Server | ||||
|  | ||||
| # The mock modifies these in place, so we have to define them per string | ||||
| user_in_slapd_dn = "cn=user_in_slapd_cn,ou=users,dc=goauthentik,dc=io" | ||||
| user_in_slapd_cn = "user_in_slapd_cn" | ||||
| user_in_slapd_uid = "user_in_slapd_uid" | ||||
| user_in_slapd_object_class = "person" | ||||
| user_in_slapd = { | ||||
|     "dn": user_in_slapd_dn, | ||||
|     "attributes": { | ||||
|         "cn": user_in_slapd_cn, | ||||
|         "uid": user_in_slapd_uid, | ||||
|         "objectClass": user_in_slapd_object_class, | ||||
|     }, | ||||
| } | ||||
| group_in_slapd_dn = "cn=user_in_slapd_cn,ou=groups,dc=goauthentik,dc=io" | ||||
| group_in_slapd_cn = "group_in_slapd_cn" | ||||
| group_in_slapd_uid = "group_in_slapd_uid" | ||||
| group_in_slapd_object_class = "groupOfNames" | ||||
| group_in_slapd = { | ||||
|     "dn": group_in_slapd_dn, | ||||
|     "attributes": { | ||||
|         "cn": group_in_slapd_cn, | ||||
|         "uid": group_in_slapd_uid, | ||||
|         "objectClass": group_in_slapd_object_class, | ||||
|         "member": [user_in_slapd["dn"]], | ||||
|     }, | ||||
| } | ||||
|  | ||||
|  | ||||
| def mock_slapd_connection(password: str) -> Connection: | ||||
|     """Create mock SLAPD connection""" | ||||
| @ -123,14 +96,5 @@ def mock_slapd_connection(password: str) -> Connection: | ||||
|             "objectClass": "posixAccount", | ||||
|         }, | ||||
|     ) | ||||
|     # Known user and group | ||||
|     connection.strategy.add_entry( | ||||
|         user_in_slapd["dn"], | ||||
|         user_in_slapd["attributes"], | ||||
|     ) | ||||
|     connection.strategy.add_entry( | ||||
|         group_in_slapd["dn"], | ||||
|         group_in_slapd["attributes"], | ||||
|     ) | ||||
|     connection.bind() | ||||
|     return connection | ||||
|  | ||||
| @ -13,26 +13,14 @@ from authentik.events.system_tasks import TaskStatus | ||||
| from authentik.lib.generators import generate_id, generate_key | ||||
| from authentik.lib.sync.outgoing.exceptions import StopSync | ||||
| from authentik.lib.utils.reflection import class_to_path | ||||
| from authentik.sources.ldap.models import ( | ||||
|     GroupLDAPSourceConnection, | ||||
|     LDAPSource, | ||||
|     LDAPSourcePropertyMapping, | ||||
|     UserLDAPSourceConnection, | ||||
| ) | ||||
| from authentik.sources.ldap.sync.forward_delete_users import DELETE_CHUNK_SIZE | ||||
| from authentik.sources.ldap.models import LDAPSource, LDAPSourcePropertyMapping | ||||
| from authentik.sources.ldap.sync.groups import GroupLDAPSynchronizer | ||||
| from authentik.sources.ldap.sync.membership import MembershipLDAPSynchronizer | ||||
| from authentik.sources.ldap.sync.users import UserLDAPSynchronizer | ||||
| from authentik.sources.ldap.tasks import ldap_sync, ldap_sync_all | ||||
| from authentik.sources.ldap.tests.mock_ad import mock_ad_connection | ||||
| from authentik.sources.ldap.tests.mock_freeipa import mock_freeipa_connection | ||||
| from authentik.sources.ldap.tests.mock_slapd import ( | ||||
|     group_in_slapd_cn, | ||||
|     group_in_slapd_uid, | ||||
|     mock_slapd_connection, | ||||
|     user_in_slapd_cn, | ||||
|     user_in_slapd_uid, | ||||
| ) | ||||
| from authentik.sources.ldap.tests.mock_slapd import mock_slapd_connection | ||||
|  | ||||
| LDAP_PASSWORD = generate_key() | ||||
|  | ||||
| @ -320,160 +308,3 @@ class LDAPSyncTests(TestCase): | ||||
|         connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD)) | ||||
|         with patch("authentik.sources.ldap.models.LDAPSource.connection", connection): | ||||
|             ldap_sync_all.delay().get() | ||||
|  | ||||
|     def test_user_deletion(self): | ||||
|         """Test user deletion""" | ||||
|         user = User.objects.create_user(username="not-in-the-source") | ||||
|         UserLDAPSourceConnection.objects.create( | ||||
|             user=user, source=self.source, identifier="not-in-the-source" | ||||
|         ) | ||||
|         self.source.object_uniqueness_field = "uid" | ||||
|         self.source.group_object_filter = "(objectClass=groupOfNames)" | ||||
|         self.source.delete_not_found_objects = True | ||||
|         self.source.save() | ||||
|  | ||||
|         connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD)) | ||||
|         with patch("authentik.sources.ldap.models.LDAPSource.connection", connection): | ||||
|             ldap_sync_all.delay().get() | ||||
|         self.assertFalse(User.objects.filter(username="not-in-the-source").exists()) | ||||
|  | ||||
|     def test_user_deletion_still_in_source(self): | ||||
|         """Test that user is not deleted if it's still in the source""" | ||||
|         username = user_in_slapd_cn | ||||
|         identifier = user_in_slapd_uid | ||||
|         user = User.objects.create_user(username=username) | ||||
|         UserLDAPSourceConnection.objects.create( | ||||
|             user=user, source=self.source, identifier=identifier | ||||
|         ) | ||||
|         self.source.object_uniqueness_field = "uid" | ||||
|         self.source.group_object_filter = "(objectClass=groupOfNames)" | ||||
|         self.source.delete_not_found_objects = True | ||||
|         self.source.save() | ||||
|  | ||||
|         connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD)) | ||||
|         with patch("authentik.sources.ldap.models.LDAPSource.connection", connection): | ||||
|             ldap_sync_all.delay().get() | ||||
|         self.assertTrue(User.objects.filter(username=username).exists()) | ||||
|  | ||||
|     def test_user_deletion_no_sync(self): | ||||
|         """Test that user is not deleted if sync_users is False""" | ||||
|         user = User.objects.create_user(username="not-in-the-source") | ||||
|         UserLDAPSourceConnection.objects.create( | ||||
|             user=user, source=self.source, identifier="not-in-the-source" | ||||
|         ) | ||||
|         self.source.object_uniqueness_field = "uid" | ||||
|         self.source.group_object_filter = "(objectClass=groupOfNames)" | ||||
|         self.source.delete_not_found_objects = True | ||||
|         self.source.sync_users = False | ||||
|         self.source.save() | ||||
|  | ||||
|         connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD)) | ||||
|         with patch("authentik.sources.ldap.models.LDAPSource.connection", connection): | ||||
|             ldap_sync_all.delay().get() | ||||
|         self.assertTrue(User.objects.filter(username="not-in-the-source").exists()) | ||||
|  | ||||
|     def test_user_deletion_no_delete(self): | ||||
|         """Test that user is not deleted if delete_not_found_objects is False""" | ||||
|         user = User.objects.create_user(username="not-in-the-source") | ||||
|         UserLDAPSourceConnection.objects.create( | ||||
|             user=user, source=self.source, identifier="not-in-the-source" | ||||
|         ) | ||||
|         self.source.object_uniqueness_field = "uid" | ||||
|         self.source.group_object_filter = "(objectClass=groupOfNames)" | ||||
|         self.source.save() | ||||
|  | ||||
|         connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD)) | ||||
|         with patch("authentik.sources.ldap.models.LDAPSource.connection", connection): | ||||
|             ldap_sync_all.delay().get() | ||||
|         self.assertTrue(User.objects.filter(username="not-in-the-source").exists()) | ||||
|  | ||||
|     def test_group_deletion(self): | ||||
|         """Test group deletion""" | ||||
|         group = Group.objects.create(name="not-in-the-source") | ||||
|         GroupLDAPSourceConnection.objects.create( | ||||
|             group=group, source=self.source, identifier="not-in-the-source" | ||||
|         ) | ||||
|         self.source.object_uniqueness_field = "uid" | ||||
|         self.source.group_object_filter = "(objectClass=groupOfNames)" | ||||
|         self.source.delete_not_found_objects = True | ||||
|         self.source.save() | ||||
|  | ||||
|         connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD)) | ||||
|         with patch("authentik.sources.ldap.models.LDAPSource.connection", connection): | ||||
|             ldap_sync_all.delay().get() | ||||
|         self.assertFalse(Group.objects.filter(name="not-in-the-source").exists()) | ||||
|  | ||||
|     def test_group_deletion_still_in_source(self): | ||||
|         """Test that group is not deleted if it's still in the source""" | ||||
|         groupname = group_in_slapd_cn | ||||
|         identifier = group_in_slapd_uid | ||||
|         group = Group.objects.create(name=groupname) | ||||
|         GroupLDAPSourceConnection.objects.create( | ||||
|             group=group, source=self.source, identifier=identifier | ||||
|         ) | ||||
|         self.source.object_uniqueness_field = "uid" | ||||
|         self.source.group_object_filter = "(objectClass=groupOfNames)" | ||||
|         self.source.delete_not_found_objects = True | ||||
|         self.source.save() | ||||
|  | ||||
|         connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD)) | ||||
|         with patch("authentik.sources.ldap.models.LDAPSource.connection", connection): | ||||
|             ldap_sync_all.delay().get() | ||||
|         self.assertTrue(Group.objects.filter(name=groupname).exists()) | ||||
|  | ||||
|     def test_group_deletion_no_sync(self): | ||||
|         """Test that group is not deleted if sync_groups is False""" | ||||
|         group = Group.objects.create(name="not-in-the-source") | ||||
|         GroupLDAPSourceConnection.objects.create( | ||||
|             group=group, source=self.source, identifier="not-in-the-source" | ||||
|         ) | ||||
|         self.source.object_uniqueness_field = "uid" | ||||
|         self.source.group_object_filter = "(objectClass=groupOfNames)" | ||||
|         self.source.delete_not_found_objects = True | ||||
|         self.source.sync_groups = False | ||||
|         self.source.save() | ||||
|  | ||||
|         connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD)) | ||||
|         with patch("authentik.sources.ldap.models.LDAPSource.connection", connection): | ||||
|             ldap_sync_all.delay().get() | ||||
|         self.assertTrue(Group.objects.filter(name="not-in-the-source").exists()) | ||||
|  | ||||
|     def test_group_deletion_no_delete(self): | ||||
|         """Test that group is not deleted if delete_not_found_objects is False""" | ||||
|         group = Group.objects.create(name="not-in-the-source") | ||||
|         GroupLDAPSourceConnection.objects.create( | ||||
|             group=group, source=self.source, identifier="not-in-the-source" | ||||
|         ) | ||||
|         self.source.object_uniqueness_field = "uid" | ||||
|         self.source.group_object_filter = "(objectClass=groupOfNames)" | ||||
|         self.source.save() | ||||
|  | ||||
|         connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD)) | ||||
|         with patch("authentik.sources.ldap.models.LDAPSource.connection", connection): | ||||
|             ldap_sync_all.delay().get() | ||||
|         self.assertTrue(Group.objects.filter(name="not-in-the-source").exists()) | ||||
|  | ||||
|     def test_batch_deletion(self): | ||||
|         """Test batch deletion""" | ||||
|         BATCH_SIZE = DELETE_CHUNK_SIZE + 1 | ||||
|         for i in range(BATCH_SIZE): | ||||
|             user = User.objects.create_user(username=f"not-in-the-source-{i}") | ||||
|             group = Group.objects.create(name=f"not-in-the-source-{i}") | ||||
|             group.users.add(user) | ||||
|             UserLDAPSourceConnection.objects.create( | ||||
|                 user=user, source=self.source, identifier=f"not-in-the-source-{i}-user" | ||||
|             ) | ||||
|             GroupLDAPSourceConnection.objects.create( | ||||
|                 group=group, source=self.source, identifier=f"not-in-the-source-{i}-group" | ||||
|             ) | ||||
|         self.source.object_uniqueness_field = "uid" | ||||
|         self.source.group_object_filter = "(objectClass=groupOfNames)" | ||||
|         self.source.delete_not_found_objects = True | ||||
|         self.source.save() | ||||
|  | ||||
|         connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD)) | ||||
|         with patch("authentik.sources.ldap.models.LDAPSource.connection", connection): | ||||
|             ldap_sync_all.delay().get() | ||||
|  | ||||
|         self.assertFalse(User.objects.filter(username__startswith="not-in-the-source").exists()) | ||||
|         self.assertFalse(Group.objects.filter(name__startswith="not-in-the-source").exists()) | ||||
|  | ||||
| @ -9,7 +9,6 @@ from django.http.response import HttpResponseBadRequest | ||||
| from django.shortcuts import get_object_or_404, redirect | ||||
| from django.utils.decorators import method_decorator | ||||
| from django.utils.http import urlencode | ||||
| from django.utils.translation import gettext as _ | ||||
| from django.views import View | ||||
| from django.views.decorators.csrf import csrf_exempt | ||||
| from structlog.stdlib import get_logger | ||||
| @ -129,9 +128,7 @@ class InitiateView(View): | ||||
|         # otherwise we default to POST_AUTO, with direct redirect | ||||
|         if source.binding_type == SAMLBindingTypes.POST: | ||||
|             injected_stages.append(in_memory_stage(ConsentStageView)) | ||||
|             plan_kwargs[PLAN_CONTEXT_CONSENT_HEADER] = _( | ||||
|                 "Continue to {source_name}".format(source_name=source.name) | ||||
|             ) | ||||
|             plan_kwargs[PLAN_CONTEXT_CONSENT_HEADER] = f"Continue to {source.name}" | ||||
|         injected_stages.append(in_memory_stage(AutosubmitStageView)) | ||||
|         return self.handle_login_flow( | ||||
|             source, | ||||
|  | ||||
| @ -97,8 +97,7 @@ class GroupsView(SCIMObjectView): | ||||
|                     self.logger.warning("Invalid group member", exc=exc) | ||||
|                     continue | ||||
|                 query |= Q(uuid=member.value) | ||||
|             if query: | ||||
|                 group.users.set(User.objects.filter(query)) | ||||
|             group.users.set(User.objects.filter(query)) | ||||
|         if not connection: | ||||
|             connection, _ = SCIMSourceGroup.objects.get_or_create( | ||||
|                 source=self.source, | ||||
|  | ||||
| @ -4,8 +4,6 @@ from uuid import uuid4 | ||||
|  | ||||
| from django.http import HttpRequest, HttpResponse | ||||
| from django.utils.timezone import now | ||||
| from django.utils.translation import gettext as _ | ||||
| from rest_framework.exceptions import ValidationError | ||||
| from rest_framework.fields import CharField | ||||
|  | ||||
| from authentik.core.api.utils import PassiveSerializer | ||||
| @ -49,11 +47,6 @@ class ConsentChallengeResponse(ChallengeResponse): | ||||
|     component = CharField(default="ak-stage-consent") | ||||
|     token = CharField(required=True) | ||||
|  | ||||
|     def validate_token(self, token: str): | ||||
|         if token != self.stage.executor.request.session[SESSION_KEY_CONSENT_TOKEN]: | ||||
|             raise ValidationError(_("Invalid consent token, re-showing prompt")) | ||||
|         return token | ||||
|  | ||||
|  | ||||
| class ConsentStageView(ChallengeStageView): | ||||
|     """Simple consent checker.""" | ||||
| @ -127,6 +120,9 @@ class ConsentStageView(ChallengeStageView): | ||||
|         return super().get(request, *args, **kwargs) | ||||
|  | ||||
|     def challenge_valid(self, response: ChallengeResponse) -> HttpResponse: | ||||
|         if response.data["token"] != self.request.session[SESSION_KEY_CONSENT_TOKEN]: | ||||
|             self.logger.info("Invalid consent token, re-showing prompt") | ||||
|             return self.get(self.request) | ||||
|         if self.should_always_prompt(): | ||||
|             return self.executor.stage_ok() | ||||
|         current_stage: ConsentStage = self.executor.current_stage | ||||
|  | ||||
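The two sides of this comparison check the consent token in different places: one validates it in the response serializer's validate_token and returns a field error, the other compares it against the session copy inside challenge_valid and simply re-renders the prompt. A minimal sketch of the session-comparison variant with simplified stand-in types (not the authentik API):

SESSION_KEY_CONSENT_TOKEN = "consent_token"  # illustrative key name

class ConsentCheck:
    """Stand-in for the stage view: compare the submitted token with the session copy."""

    def __init__(self, session: dict):
        self.session = session

    def challenge_valid(self, response_data: dict) -> str:
        if response_data.get("token") != self.session.get(SESSION_KEY_CONSENT_TOKEN):
            # Mismatch (e.g. a stale form): re-show the prompt instead of raising an error.
            return "re-prompt"
        return "stage_ok"

check = ConsentCheck({SESSION_KEY_CONSENT_TOKEN: "abc123"})
assert check.challenge_valid({"token": "wrong"}) == "re-prompt"
assert check.challenge_valid({"token": "abc123"}) == "stage_ok"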
| @ -17,7 +17,6 @@ from authentik.flows.views.executor import SESSION_KEY_PLAN | ||||
| from authentik.lib.generators import generate_id | ||||
| from authentik.stages.consent.models import ConsentMode, ConsentStage, UserConsent | ||||
| from authentik.stages.consent.stage import ( | ||||
|     PLAN_CONTEXT_CONSENT_HEADER, | ||||
|     PLAN_CONTEXT_CONSENT_PERMISSIONS, | ||||
|     SESSION_KEY_CONSENT_TOKEN, | ||||
| ) | ||||
| @ -34,40 +33,6 @@ class TestConsentStage(FlowTestCase): | ||||
|             slug=generate_id(), | ||||
|         ) | ||||
|  | ||||
|     def test_mismatched_token(self): | ||||
|         """Test incorrect token""" | ||||
|         flow = create_test_flow(FlowDesignation.AUTHENTICATION) | ||||
|         stage = ConsentStage.objects.create(name=generate_id(), mode=ConsentMode.ALWAYS_REQUIRE) | ||||
|         binding = FlowStageBinding.objects.create(target=flow, stage=stage, order=2) | ||||
|  | ||||
|         plan = FlowPlan(flow_pk=flow.pk.hex, bindings=[binding], markers=[StageMarker()]) | ||||
|         session = self.client.session | ||||
|         session[SESSION_KEY_PLAN] = plan | ||||
|         session.save() | ||||
|         response = self.client.get( | ||||
|             reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}), | ||||
|         ) | ||||
|         self.assertEqual(response.status_code, 200) | ||||
|  | ||||
|         session = self.client.session | ||||
|         response = self.client.post( | ||||
|             reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}), | ||||
|             { | ||||
|                 "token": generate_id(), | ||||
|             }, | ||||
|         ) | ||||
|  | ||||
|         self.assertEqual(response.status_code, 200) | ||||
|         self.assertStageResponse( | ||||
|             response, | ||||
|             flow, | ||||
|             component="ak-stage-consent", | ||||
|             response_errors={ | ||||
|                 "token": [{"string": "Invalid consent token, re-showing prompt", "code": "invalid"}] | ||||
|             }, | ||||
|         ) | ||||
|         self.assertFalse(UserConsent.objects.filter(user=self.user).exists()) | ||||
|  | ||||
|     def test_always_required(self): | ||||
|         """Test always required consent""" | ||||
|         flow = create_test_flow(FlowDesignation.AUTHENTICATION) | ||||
| @ -193,7 +158,6 @@ class TestConsentStage(FlowTestCase): | ||||
|             context={ | ||||
|                 PLAN_CONTEXT_APPLICATION: self.application, | ||||
|                 PLAN_CONTEXT_CONSENT_PERMISSIONS: [PermissionDict(id="foo", name="foo-desc")], | ||||
|                 PLAN_CONTEXT_CONSENT_HEADER: "test header", | ||||
|             }, | ||||
|         ) | ||||
|         session = self.client.session | ||||
|  | ||||
| @ -1,38 +0,0 @@ | ||||
| from base64 import b64encode | ||||
| from copy import deepcopy | ||||
| from pickle import dumps  # nosec | ||||
|  | ||||
| from django.utils.translation import gettext as _ | ||||
|  | ||||
| from authentik.flows.models import FlowToken, in_memory_stage | ||||
| from authentik.flows.planner import PLAN_CONTEXT_IS_RESTORED, FlowPlan | ||||
| from authentik.stages.consent.stage import PLAN_CONTEXT_CONSENT_HEADER, ConsentStageView | ||||
|  | ||||
|  | ||||
| def pickle_flow_token_for_email(plan: FlowPlan): | ||||
|     """Insert a consent stage into the flow plan and pickle it for a FlowToken, | ||||
|     to be sent via Email. This is to prevent automated email scanners, which sometimes | ||||
|     open links in emails in a full browser from breaking the link.""" | ||||
|     plan_copy = deepcopy(plan) | ||||
|     plan_copy.insert_stage(in_memory_stage(EmailTokenRevocationConsentStageView), index=0) | ||||
|     plan_copy.context[PLAN_CONTEXT_CONSENT_HEADER] = _("Continue to confirm this email address.") | ||||
|     data = dumps(plan_copy) | ||||
|     return b64encode(data).decode() | ||||
|  | ||||
|  | ||||
| class EmailTokenRevocationConsentStageView(ConsentStageView): | ||||
|  | ||||
|     def get(self, request, *args, **kwargs): | ||||
|         token: FlowToken = self.executor.plan.context[PLAN_CONTEXT_IS_RESTORED] | ||||
|         try: | ||||
|             token.refresh_from_db() | ||||
|         except FlowToken.DoesNotExist: | ||||
|             return self.executor.stage_invalid( | ||||
|                 _("Link was already used, please request a new link.") | ||||
|             ) | ||||
|         return super().get(request, *args, **kwargs) | ||||
|  | ||||
|     def challenge_valid(self, response): | ||||
|         token: FlowToken = self.executor.plan.context[PLAN_CONTEXT_IS_RESTORED] | ||||
|         token.delete() | ||||
|         return super().challenge_valid(response) | ||||
| @ -23,7 +23,6 @@ from authentik.flows.stage import ChallengeStageView | ||||
| from authentik.flows.views.executor import QS_KEY_TOKEN, QS_QUERY | ||||
| from authentik.lib.utils.errors import exception_to_string | ||||
| from authentik.lib.utils.time import timedelta_from_string | ||||
| from authentik.stages.email.flow import pickle_flow_token_for_email | ||||
| from authentik.stages.email.models import EmailStage | ||||
| from authentik.stages.email.tasks import send_mails | ||||
| from authentik.stages.email.utils import TemplateEmailMessage | ||||
| @ -87,8 +86,7 @@ class EmailStageView(ChallengeStageView): | ||||
|                 user=pending_user, | ||||
|                 identifier=identifier, | ||||
|                 flow=self.executor.flow, | ||||
|                 _plan=pickle_flow_token_for_email(self.executor.plan), | ||||
|                 revoke_on_execution=False, | ||||
|                 _plan=FlowToken.pickle(self.executor.plan), | ||||
|             ) | ||||
|         token = tokens.first() | ||||
|         # Check if token is expired and rotate key if so | ||||
|  | ||||
| @ -174,5 +174,5 @@ class TestEmailStageSending(FlowTestCase): | ||||
|                 response = self.client.post(url) | ||||
|             response = self.client.post(url) | ||||
|             self.assertEqual(response.status_code, 200) | ||||
|             self.assertGreaterEqual(len(mail.outbox), 1) | ||||
|             self.assertTrue(len(mail.outbox) >= 1) | ||||
|             self.assertEqual(mail.outbox[0].subject, "authentik") | ||||
|  | ||||
| @ -17,7 +17,6 @@ from authentik.flows.tests import FlowTestCase | ||||
| from authentik.flows.views.executor import QS_KEY_TOKEN, SESSION_KEY_PLAN, FlowExecutorView | ||||
| from authentik.lib.config import CONFIG | ||||
| from authentik.lib.generators import generate_id | ||||
| from authentik.stages.consent.stage import SESSION_KEY_CONSENT_TOKEN | ||||
| from authentik.stages.email.models import EmailStage | ||||
| from authentik.stages.email.stage import PLAN_CONTEXT_EMAIL_OVERRIDE, EmailStageView | ||||
|  | ||||
| @ -161,17 +160,6 @@ class TestEmailStage(FlowTestCase): | ||||
|                     kwargs={"flow_slug": self.flow.slug}, | ||||
|                 ) | ||||
|             ) | ||||
|             self.assertStageResponse(response, self.flow, component="ak-stage-consent") | ||||
|             response = self.client.post( | ||||
|                 reverse( | ||||
|                     "authentik_api:flow-executor", | ||||
|                     kwargs={"flow_slug": self.flow.slug}, | ||||
|                 ), | ||||
|                 data={ | ||||
|                     "token": self.client.session[SESSION_KEY_CONSENT_TOKEN], | ||||
|                 }, | ||||
|                 follow=True, | ||||
|             ) | ||||
|  | ||||
|             self.assertEqual(response.status_code, 200) | ||||
|             self.assertStageRedirects(response, reverse("authentik_core:root-redirect")) | ||||
| @ -194,7 +182,6 @@ class TestEmailStage(FlowTestCase): | ||||
|         # Set flow token user to a different user | ||||
|         token: FlowToken = FlowToken.objects.get(user=self.user) | ||||
|         token.user = create_test_admin_user() | ||||
|         token.revoke_on_execution = True | ||||
|         token.save() | ||||
|  | ||||
|         with patch("authentik.flows.views.executor.FlowExecutorView.cancel", MagicMock()): | ||||
|  | ||||
| @ -3921,46 +3921,6 @@ | ||||
|                             } | ||||
|                         } | ||||
|                     }, | ||||
|                     { | ||||
|                         "type": "object", | ||||
|                         "required": [ | ||||
|                             "model", | ||||
|                             "identifiers" | ||||
|                         ], | ||||
|                         "properties": { | ||||
|                             "model": { | ||||
|                                 "const": "authentik_stages_mtls.mutualtlsstage" | ||||
|                             }, | ||||
|                             "id": { | ||||
|                                 "type": "string" | ||||
|                             }, | ||||
|                             "state": { | ||||
|                                 "type": "string", | ||||
|                                 "enum": [ | ||||
|                                     "absent", | ||||
|                                     "present", | ||||
|                                     "created", | ||||
|                                     "must_created" | ||||
|                                 ], | ||||
|                                 "default": "present" | ||||
|                             }, | ||||
|                             "conditions": { | ||||
|                                 "type": "array", | ||||
|                                 "items": { | ||||
|                                     "type": "boolean" | ||||
|                                 } | ||||
|                             }, | ||||
|                             "permissions": { | ||||
|                                 "$ref": "#/$defs/model_authentik_stages_mtls.mutualtlsstage_permissions" | ||||
|                             }, | ||||
|                             "attrs": { | ||||
|                                 "$ref": "#/$defs/model_authentik_stages_mtls.mutualtlsstage" | ||||
|                             }, | ||||
|                             "identifiers": { | ||||
|                                 "$ref": "#/$defs/model_authentik_stages_mtls.mutualtlsstage" | ||||
|                             } | ||||
|                         } | ||||
|                     }, | ||||
|                     { | ||||
|                         "type": "object", | ||||
|                         "required": [ | ||||
| @ -4907,7 +4867,6 @@ | ||||
|                         "authentik.enterprise.providers.microsoft_entra", | ||||
|                         "authentik.enterprise.providers.ssf", | ||||
|                         "authentik.enterprise.stages.authenticator_endpoint_gdtc", | ||||
|                         "authentik.enterprise.stages.mtls", | ||||
|                         "authentik.enterprise.stages.source", | ||||
|                         "authentik.events" | ||||
|                     ], | ||||
| @ -5018,7 +4977,6 @@ | ||||
|                         "authentik_providers_microsoft_entra.microsoftentraprovidermapping", | ||||
|                         "authentik_providers_ssf.ssfprovider", | ||||
|                         "authentik_stages_authenticator_endpoint_gdtc.authenticatorendpointgdtcstage", | ||||
|                         "authentik_stages_mtls.mutualtlsstage", | ||||
|                         "authentik_stages_source.sourcestage", | ||||
|                         "authentik_events.event", | ||||
|                         "authentik_events.notificationtransport", | ||||
| @ -7519,11 +7477,6 @@ | ||||
|                             "authentik_stages_invitation.delete_invitationstage", | ||||
|                             "authentik_stages_invitation.view_invitation", | ||||
|                             "authentik_stages_invitation.view_invitationstage", | ||||
|                             "authentik_stages_mtls.add_mutualtlsstage", | ||||
|                             "authentik_stages_mtls.change_mutualtlsstage", | ||||
|                             "authentik_stages_mtls.delete_mutualtlsstage", | ||||
|                             "authentik_stages_mtls.pass_outpost_certificate", | ||||
|                             "authentik_stages_mtls.view_mutualtlsstage", | ||||
|                             "authentik_stages_password.add_passwordstage", | ||||
|                             "authentik_stages_password.change_passwordstage", | ||||
|                             "authentik_stages_password.delete_passwordstage", | ||||
| @ -8180,11 +8133,6 @@ | ||||
|                     "type": "boolean", | ||||
|                     "title": "Lookup groups from user", | ||||
|                     "description": "Lookup group membership based on a user attribute instead of a group attribute. This allows nested group resolution on systems like FreeIPA and Active Directory" | ||||
|                 }, | ||||
|                 "delete_not_found_objects": { | ||||
|                     "type": "boolean", | ||||
|                     "title": "Delete not found objects", | ||||
|                     "description": "Delete authentik users and groups which were previously supplied by this source, but are now missing from it." | ||||
|                 } | ||||
|             }, | ||||
|             "required": [] | ||||
| @ -13474,16 +13422,6 @@ | ||||
|                     "title": "Web certificate", | ||||
|                     "description": "Web Certificate used by the authentik Core webserver." | ||||
|                 }, | ||||
|                 "client_certificates": { | ||||
|                     "type": "array", | ||||
|                     "items": { | ||||
|                         "type": "string", | ||||
|                         "format": "uuid", | ||||
|                         "description": "Certificates used for client authentication." | ||||
|                     }, | ||||
|                     "title": "Client certificates", | ||||
|                     "description": "Certificates used for client authentication." | ||||
|                 }, | ||||
|                 "attributes": { | ||||
|                     "type": "object", | ||||
|                     "additionalProperties": true, | ||||
| @ -14247,11 +14185,6 @@ | ||||
|                             "authentik_stages_invitation.delete_invitationstage", | ||||
|                             "authentik_stages_invitation.view_invitation", | ||||
|                             "authentik_stages_invitation.view_invitationstage", | ||||
|                             "authentik_stages_mtls.add_mutualtlsstage", | ||||
|                             "authentik_stages_mtls.change_mutualtlsstage", | ||||
|                             "authentik_stages_mtls.delete_mutualtlsstage", | ||||
|                             "authentik_stages_mtls.pass_outpost_certificate", | ||||
|                             "authentik_stages_mtls.view_mutualtlsstage", | ||||
|                             "authentik_stages_password.add_passwordstage", | ||||
|                             "authentik_stages_password.change_passwordstage", | ||||
|                             "authentik_stages_password.delete_passwordstage", | ||||
| @ -15155,161 +15088,6 @@ | ||||
|                 } | ||||
|             } | ||||
|         }, | ||||
|         "model_authentik_stages_mtls.mutualtlsstage": { | ||||
|             "type": "object", | ||||
|             "properties": { | ||||
|                 "name": { | ||||
|                     "type": "string", | ||||
|                     "minLength": 1, | ||||
|                     "title": "Name" | ||||
|                 }, | ||||
|                 "flow_set": { | ||||
|                     "type": "array", | ||||
|                     "items": { | ||||
|                         "type": "object", | ||||
|                         "properties": { | ||||
|                             "name": { | ||||
|                                 "type": "string", | ||||
|                                 "minLength": 1, | ||||
|                                 "title": "Name" | ||||
|                             }, | ||||
|                             "slug": { | ||||
|                                 "type": "string", | ||||
|                                 "maxLength": 50, | ||||
|                                 "minLength": 1, | ||||
|                                 "pattern": "^[-a-zA-Z0-9_]+$", | ||||
|                                 "title": "Slug", | ||||
|                                 "description": "Visible in the URL." | ||||
|                             }, | ||||
|                             "title": { | ||||
|                                 "type": "string", | ||||
|                                 "minLength": 1, | ||||
|                                 "title": "Title", | ||||
|                                 "description": "Shown as the Title in Flow pages." | ||||
|                             }, | ||||
|                             "designation": { | ||||
|                                 "type": "string", | ||||
|                                 "enum": [ | ||||
|                                     "authentication", | ||||
|                                     "authorization", | ||||
|                                     "invalidation", | ||||
|                                     "enrollment", | ||||
|                                     "unenrollment", | ||||
|                                     "recovery", | ||||
|                                     "stage_configuration" | ||||
|                                 ], | ||||
|                                 "title": "Designation", | ||||
|                                 "description": "Decides what this Flow is used for. For example, the Authentication flow is redirect to when an un-authenticated user visits authentik." | ||||
|                             }, | ||||
|                             "policy_engine_mode": { | ||||
|                                 "type": "string", | ||||
|                                 "enum": [ | ||||
|                                     "all", | ||||
|                                     "any" | ||||
|                                 ], | ||||
|                                 "title": "Policy engine mode" | ||||
|                             }, | ||||
|                             "compatibility_mode": { | ||||
|                                 "type": "boolean", | ||||
|                                 "title": "Compatibility mode", | ||||
|                                 "description": "Enable compatibility mode, increases compatibility with password managers on mobile devices." | ||||
|                             }, | ||||
|                             "layout": { | ||||
|                                 "type": "string", | ||||
|                                 "enum": [ | ||||
|                                     "stacked", | ||||
|                                     "content_left", | ||||
|                                     "content_right", | ||||
|                                     "sidebar_left", | ||||
|                                     "sidebar_right" | ||||
|                                 ], | ||||
|                                 "title": "Layout" | ||||
|                             }, | ||||
|                             "denied_action": { | ||||
|                                 "type": "string", | ||||
|                                 "enum": [ | ||||
|                                     "message_continue", | ||||
|                                     "message", | ||||
|                                     "continue" | ||||
|                                 ], | ||||
|                                 "title": "Denied action", | ||||
|                                 "description": "Configure what should happen when a flow denies access to a user." | ||||
|                             } | ||||
|                         }, | ||||
|                         "required": [ | ||||
|                             "name", | ||||
|                             "slug", | ||||
|                             "title", | ||||
|                             "designation" | ||||
|                         ] | ||||
|                     }, | ||||
|                     "title": "Flow set" | ||||
|                 }, | ||||
|                 "mode": { | ||||
|                     "type": "string", | ||||
|                     "enum": [ | ||||
|                         "optional", | ||||
|                         "required" | ||||
|                     ], | ||||
|                     "title": "Mode" | ||||
|                 }, | ||||
|                 "certificate_authorities": { | ||||
|                     "type": "array", | ||||
|                     "items": { | ||||
|                         "type": "string", | ||||
|                         "format": "uuid", | ||||
|                         "description": "Configure certificate authorities to validate the certificate against. This option has a higher priority than the `client_certificate` option on `Brand`." | ||||
|                     }, | ||||
|                     "title": "Certificate authorities", | ||||
|                     "description": "Configure certificate authorities to validate the certificate against. This option has a higher priority than the `client_certificate` option on `Brand`." | ||||
|                 }, | ||||
|                 "cert_attribute": { | ||||
|                     "type": "string", | ||||
|                     "enum": [ | ||||
|                         "subject", | ||||
|                         "common_name", | ||||
|                         "email" | ||||
|                     ], | ||||
|                     "title": "Cert attribute" | ||||
|                 }, | ||||
|                 "user_attribute": { | ||||
|                     "type": "string", | ||||
|                     "enum": [ | ||||
|                         "username", | ||||
|                         "email" | ||||
|                     ], | ||||
|                     "title": "User attribute" | ||||
|                 } | ||||
|             }, | ||||
|             "required": [] | ||||
|         }, | ||||
|         "model_authentik_stages_mtls.mutualtlsstage_permissions": { | ||||
|             "type": "array", | ||||
|             "items": { | ||||
|                 "type": "object", | ||||
|                 "required": [ | ||||
|                     "permission" | ||||
|                 ], | ||||
|                 "properties": { | ||||
|                     "permission": { | ||||
|                         "type": "string", | ||||
|                         "enum": [ | ||||
|                             "pass_outpost_certificate", | ||||
|                             "add_mutualtlsstage", | ||||
|                             "change_mutualtlsstage", | ||||
|                             "delete_mutualtlsstage", | ||||
|                             "view_mutualtlsstage" | ||||
|                         ] | ||||
|                     }, | ||||
|                     "user": { | ||||
|                         "type": "integer" | ||||
|                     }, | ||||
|                     "role": { | ||||
|                         "type": "string" | ||||
|                     } | ||||
|                 } | ||||
|             } | ||||
|         }, | ||||
|         "model_authentik_stages_source.sourcestage": { | ||||
|             "type": "object", | ||||
|             "properties": { | ||||
|  | ||||
| @ -19,6 +19,7 @@ import ( | ||||
| 	sentryutils "goauthentik.io/internal/utils/sentry" | ||||
| 	webutils "goauthentik.io/internal/utils/web" | ||||
| 	"goauthentik.io/internal/web" | ||||
| 	"goauthentik.io/internal/web/brand_tls" | ||||
| ) | ||||
|  | ||||
| var rootCmd = &cobra.Command{ | ||||
| @ -66,12 +67,12 @@ var rootCmd = &cobra.Command{ | ||||
| 		} | ||||
|  | ||||
| 		ws := web.NewWebServer() | ||||
| 		ws.Core().AddHealthyCallback(func() { | ||||
| 		ws.Core().HealthyCallback = func() { | ||||
| 			if config.Get().Outposts.DisableEmbeddedOutpost { | ||||
| 				return | ||||
| 			} | ||||
| 			go attemptProxyStart(ws, u) | ||||
| 		}) | ||||
| 		} | ||||
| 		ws.Start() | ||||
| 		<-ex | ||||
| 		l.Info("shutting down webserver") | ||||
| @ -94,8 +95,13 @@ func attemptProxyStart(ws *web.WebServer, u *url.URL) { | ||||
| 			} | ||||
| 			continue | ||||
| 		} | ||||
| 		// Init brand_tls here too since it requires an API Client, | ||||
| 		// so we just reuse the same one as the outpost uses | ||||
| 		tw := brand_tls.NewWatcher(ac.Client) | ||||
| 		go tw.Start() | ||||
| 		ws.BrandTLS = tw | ||||
| 		ac.AddRefreshHandler(func() { | ||||
| 			ws.BrandTLS.Check() | ||||
| 			tw.Check() | ||||
| 		}) | ||||
|  | ||||
| 		srv := proxyv2.NewProxyServer(ac) | ||||
|  | ||||
							
								
								
									
2	go.mod
							| @ -27,7 +27,7 @@ require ( | ||||
| 	github.com/spf13/cobra v1.9.1 | ||||
| 	github.com/stretchr/testify v1.10.0 | ||||
| 	github.com/wwt/guac v1.3.2 | ||||
| 	goauthentik.io/api/v3 v3.2025041.2 | ||||
| 	goauthentik.io/api/v3 v3.2025041.1 | ||||
| 	golang.org/x/exp v0.0.0-20230210204819-062eb4c674ab | ||||
| 	golang.org/x/oauth2 v0.30.0 | ||||
| 	golang.org/x/sync v0.14.0 | ||||
|  | ||||
							
								
								
									
4	go.sum
							| @ -290,8 +290,8 @@ go.opentelemetry.io/otel/trace v1.24.0 h1:CsKnnL4dUAr/0llH9FKuc698G04IrpWV0MQA/Y | ||||
| go.opentelemetry.io/otel/trace v1.24.0/go.mod h1:HPc3Xr/cOApsBI154IU0OI0HJexz+aw5uPdbs3UCjNU= | ||||
| go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= | ||||
| go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= | ||||
| goauthentik.io/api/v3 v3.2025041.2 h1:vFYYnhcDcxL95RczZwhzt3i4LptFXMvIRN+vgf8sQYg= | ||||
| goauthentik.io/api/v3 v3.2025041.2/go.mod h1:zz+mEZg8rY/7eEjkMGWJ2DnGqk+zqxuybGCGrR2O4Kw= | ||||
| goauthentik.io/api/v3 v3.2025041.1 h1:GAN6AoTmfnCGgx1SyM07jP4/LR/T3rkTEyShSBd3Co8= | ||||
| goauthentik.io/api/v3 v3.2025041.1/go.mod h1:zz+mEZg8rY/7eEjkMGWJ2DnGqk+zqxuybGCGrR2O4Kw= | ||||
| golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= | ||||
| golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= | ||||
| golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= | ||||
|  | ||||
| @ -21,14 +21,10 @@ func FullVersion() string { | ||||
| 	return ver | ||||
| } | ||||
|  | ||||
| func UserAgentOutpost() string { | ||||
| func OutpostUserAgent() string { | ||||
| 	return fmt.Sprintf("goauthentik.io/outpost/%s", FullVersion()) | ||||
| } | ||||
|  | ||||
| func UserAgentIPC() string { | ||||
| 	return fmt.Sprintf("goauthentik.io/ipc/%s", FullVersion()) | ||||
| } | ||||
|  | ||||
| func UserAgent() string { | ||||
| 	return fmt.Sprintf("authentik@%s", FullVersion()) | ||||
| } | ||||
|  | ||||
| @ -18,8 +18,8 @@ import ( | ||||
| ) | ||||
|  | ||||
| type GoUnicorn struct { | ||||
| 	Healthcheck      func() bool | ||||
| 	healthyCallbacks []func() | ||||
| 	Healthcheck     func() bool | ||||
| 	HealthyCallback func() | ||||
|  | ||||
| 	log     *log.Entry | ||||
| 	p       *exec.Cmd | ||||
| @ -32,12 +32,12 @@ type GoUnicorn struct { | ||||
| func New(healthcheck func() bool) *GoUnicorn { | ||||
| 	logger := log.WithField("logger", "authentik.router.unicorn") | ||||
| 	g := &GoUnicorn{ | ||||
| 		Healthcheck:      healthcheck, | ||||
| 		log:              logger, | ||||
| 		started:          false, | ||||
| 		killed:           false, | ||||
| 		alive:            false, | ||||
| 		healthyCallbacks: []func(){}, | ||||
| 		Healthcheck:     healthcheck, | ||||
| 		log:             logger, | ||||
| 		started:         false, | ||||
| 		killed:          false, | ||||
| 		alive:           false, | ||||
| 		HealthyCallback: func() {}, | ||||
| 	} | ||||
| 	g.initCmd() | ||||
| 	c := make(chan os.Signal, 1) | ||||
| @ -79,10 +79,6 @@ func (g *GoUnicorn) initCmd() { | ||||
| 	g.p.Stderr = os.Stderr | ||||
| } | ||||
|  | ||||
| func (g *GoUnicorn) AddHealthyCallback(cb func()) { | ||||
| 	g.healthyCallbacks = append(g.healthyCallbacks, cb) | ||||
| } | ||||
|  | ||||
| func (g *GoUnicorn) IsRunning() bool { | ||||
| 	return g.alive | ||||
| } | ||||
| @ -105,9 +101,7 @@ func (g *GoUnicorn) healthcheck() { | ||||
| 		if g.Healthcheck() { | ||||
| 			g.alive = true | ||||
| 			g.log.Debug("backend is alive, backing off with healthchecks") | ||||
| 			for _, cb := range g.healthyCallbacks { | ||||
| 				cb() | ||||
| 			} | ||||
| 			g.HealthyCallback() | ||||
| 			break | ||||
| 		} | ||||
| 		g.log.Debug("backend not alive yet") | ||||
|  | ||||
| @ -62,7 +62,7 @@ func NewAPIController(akURL url.URL, token string) *APIController { | ||||
| 	apiConfig.Scheme = akURL.Scheme | ||||
| 	apiConfig.HTTPClient = &http.Client{ | ||||
| 		Transport: web.NewUserAgentTransport( | ||||
| 			constants.UserAgentOutpost(), | ||||
| 			constants.OutpostUserAgent(), | ||||
| 			web.NewTracingTransport( | ||||
| 				rsp.Context(), | ||||
| 				GetTLSTransport(), | ||||
|  | ||||
| @ -38,7 +38,7 @@ func (ac *APIController) initWS(akURL url.URL, outpostUUID string) error { | ||||
|  | ||||
| 	header := http.Header{ | ||||
| 		"Authorization": []string{authHeader}, | ||||
| 		"User-Agent":    []string{constants.UserAgentOutpost()}, | ||||
| 		"User-Agent":    []string{constants.OutpostUserAgent()}, | ||||
| 	} | ||||
|  | ||||
| 	dialer := websocket.Dialer{ | ||||
|  | ||||
| @ -3,8 +3,6 @@ package ak | ||||
| import ( | ||||
| 	"context" | ||||
| 	"crypto/tls" | ||||
| 	"crypto/x509" | ||||
| 	"encoding/pem" | ||||
|  | ||||
| 	log "github.com/sirupsen/logrus" | ||||
| 	"goauthentik.io/api/v3" | ||||
| @ -69,34 +67,16 @@ func (cs *CryptoStore) Fetch(uuid string) error { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	var tcert tls.Certificate | ||||
| 	if key.Data != "" { | ||||
| 		x509cert, err := tls.X509KeyPair([]byte(cert.Data), []byte(key.Data)) | ||||
| 		if err != nil { | ||||
| 			return err | ||||
| 		} | ||||
| 		tcert = x509cert | ||||
| 	} else { | ||||
| 		p, _ := pem.Decode([]byte(cert.Data)) | ||||
| 		x509cert, err := x509.ParseCertificate(p.Bytes) | ||||
| 		if err != nil { | ||||
| 			return err | ||||
| 		} | ||||
| 		tcert = tls.Certificate{ | ||||
| 			Certificate: [][]byte{x509cert.Raw}, | ||||
| 			Leaf:        x509cert, | ||||
| 		} | ||||
| 	x509cert, err := tls.X509KeyPair([]byte(cert.Data), []byte(key.Data)) | ||||
| 	if err != nil { | ||||
| 		return err | ||||
| 	} | ||||
| 	cs.certificates[uuid] = &tcert | ||||
| 	cs.certificates[uuid] = &x509cert | ||||
| 	cs.fingerprints[uuid] = cfp | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| func (cs *CryptoStore) Get(uuid string) *tls.Certificate { | ||||
| 	c, ok := cs.certificates[uuid] | ||||
| 	if ok { | ||||
| 		return c | ||||
| 	} | ||||
| 	err := cs.Fetch(uuid) | ||||
| 	if err != nil { | ||||
| 		cs.log.WithError(err).Warning("failed to fetch certificate") | ||||
|  | ||||
| @ -55,7 +55,7 @@ func doGlobalSetup(outpost api.Outpost, globalConfig *api.Config) { | ||||
| 			EnableTracing: true, | ||||
| 			TracesSampler: sentryutils.SamplerFunc(float64(globalConfig.ErrorReporting.TracesSampleRate)), | ||||
| 			Release:       fmt.Sprintf("authentik@%s", constants.VERSION), | ||||
| 			HTTPTransport: webutils.NewUserAgentTransport(constants.UserAgentOutpost(), http.DefaultTransport), | ||||
| 			HTTPTransport: webutils.NewUserAgentTransport(constants.OutpostUserAgent(), http.DefaultTransport), | ||||
| 			IgnoreErrors: []string{ | ||||
| 				http.ErrAbortHandler.Error(), | ||||
| 			}, | ||||
|  | ||||
| @ -61,7 +61,7 @@ func NewFlowExecutor(ctx context.Context, flowSlug string, refConfig *api.Config | ||||
| 		l.WithError(err).Warning("Failed to create cookiejar") | ||||
| 		panic(err) | ||||
| 	} | ||||
| 	transport := web.NewUserAgentTransport(constants.UserAgentOutpost(), web.NewTracingTransport(rsp.Context(), ak.GetTLSTransport())) | ||||
| 	transport := web.NewUserAgentTransport(constants.OutpostUserAgent(), web.NewTracingTransport(rsp.Context(), ak.GetTLSTransport())) | ||||
| 	fe := &FlowExecutor{ | ||||
| 		Params:    url.Values{}, | ||||
| 		Answers:   make(map[StageComponent]string), | ||||
|  | ||||
| @ -28,18 +28,16 @@ func NewSessionBinder(si server.LDAPServerInstance, oldBinder bind.Binder) *Sess | ||||
| 		si:  si, | ||||
| 		log: log.WithField("logger", "authentik.outpost.ldap.binder.session"), | ||||
| 	} | ||||
| 	if oldBinder != nil { | ||||
| 		if oldSb, ok := oldBinder.(*SessionBinder); ok { | ||||
| 			sb.DirectBinder = oldSb.DirectBinder | ||||
| 			sb.sessions = oldSb.sessions | ||||
| 			sb.log.Debug("re-initialised session binder") | ||||
| 			return sb | ||||
| 		} | ||||
| 	if oldSb, ok := oldBinder.(*SessionBinder); ok { | ||||
| 		sb.DirectBinder = oldSb.DirectBinder | ||||
| 		sb.sessions = oldSb.sessions | ||||
| 		sb.log.Debug("re-initialised session binder") | ||||
| 	} else { | ||||
| 		sb.sessions = ttlcache.New(ttlcache.WithDisableTouchOnHit[Credentials, ldap.LDAPResultCode]()) | ||||
| 		sb.DirectBinder = *direct.NewDirectBinder(si) | ||||
| 		go sb.sessions.Start() | ||||
| 		sb.log.Debug("initialised session binder") | ||||
| 	} | ||||
| 	sb.sessions = ttlcache.New(ttlcache.WithDisableTouchOnHit[Credentials, ldap.LDAPResultCode]()) | ||||
| 	sb.DirectBinder = *direct.NewDirectBinder(si) | ||||
| 	go sb.sessions.Start() | ||||
| 	sb.log.Debug("initialised session binder") | ||||
| 	return sb | ||||
| } | ||||
|  | ||||
|  | ||||
| @ -16,7 +16,6 @@ import ( | ||||
| 	memorybind "goauthentik.io/internal/outpost/ldap/bind/memory" | ||||
| 	"goauthentik.io/internal/outpost/ldap/constants" | ||||
| 	"goauthentik.io/internal/outpost/ldap/flags" | ||||
| 	"goauthentik.io/internal/outpost/ldap/search" | ||||
| 	directsearch "goauthentik.io/internal/outpost/ldap/search/direct" | ||||
| 	memorysearch "goauthentik.io/internal/outpost/ldap/search/memory" | ||||
| ) | ||||
| @ -86,11 +85,7 @@ func (ls *LDAPServer) Refresh() error { | ||||
| 			providers[idx].certUUID = *kp | ||||
| 		} | ||||
| 		if *provider.SearchMode.Ptr() == api.LDAPAPIACCESSMODE_CACHED { | ||||
| 			var oldSearcher search.Searcher | ||||
| 			if existing != nil { | ||||
| 				oldSearcher = existing.searcher | ||||
| 			} | ||||
| 			providers[idx].searcher = memorysearch.NewMemorySearcher(providers[idx], oldSearcher) | ||||
| 			providers[idx].searcher = memorysearch.NewMemorySearcher(providers[idx]) | ||||
| 		} else if *provider.SearchMode.Ptr() == api.LDAPAPIACCESSMODE_DIRECT { | ||||
| 			providers[idx].searcher = directsearch.NewDirectSearcher(providers[idx]) | ||||
| 		} | ||||
|  | ||||
Some files were not shown because too many files have changed in this diff.