Compare commits: dependabot...better-ver (161 commits)

| SHA1 | Author | Date | |
|---|---|---|---|
| bbd639c37d | |||
| d14b480926 | |||
| d9c79558b1 | |||
| ed20d1b6aa | |||
| f03ee47bb3 | |||
| 396366a99a | |||
| 13d2df3bf6 | |||
| d65b8ae029 | |||
| 296031c5df | |||
| 452639d6d2 | |||
| 465ccb7ab9 | |||
| fdce812ddc | |||
| 005da84dbe | |||
| b098971718 | |||
| 147bfa3f97 | |||
| fc5f91ea29 | |||
| e29961b088 | |||
| 52ca70d6bb | |||
| 42cb9cb531 | |||
| 837cd3bcb0 | |||
| 53a9c147cd | |||
| d7f166f260 | |||
| 8ce9343457 | |||
| 6af27d0c90 | |||
| 6fd48ccf9b | |||
| 5567967848 | |||
| 090a377c78 | |||
| 3e7bda87ea | |||
| f22a539b50 | |||
| 54811b2b05 | |||
| 35263f71ee | |||
| f0bc809389 | |||
| 75b45312ee | |||
| e4eeb43f8a | |||
| 04850e5c84 | |||
| fbae9f2f34 | |||
| 3c966d9252 | |||
| 9f1cef18b2 | |||
| aae20dc399 | |||
| 4dc43b788a | |||
| a3b40a97ef | |||
| 852106f02f | |||
| 7a34428aff | |||
| c1b6a681a0 | |||
| 7a8c2e7ad9 | |||
| 5c131fec36 | |||
| a575de21bc | |||
| 02275584a6 | |||
| 27268d533c | |||
| 0dba4b61f5 | |||
| c4d4512818 | |||
| a57381ca4a | |||
| 154dde9a9a | |||
| a15365a9f1 | |||
| 10f11cbc31 | |||
| caec23d52a | |||
| 7e1781ed76 | |||
| 0cfdbbbec6 | |||
| 8a1b7cb166 | |||
| f367a84676 | |||
| 32d6b03a3c | |||
| 08027bf0ad | |||
| 8c02b25677 | |||
| 160f137707 | |||
| 52c35fab06 | |||
| 69a07c1c88 | |||
| 691a0d66ee | |||
| 3f4328bf2a | |||
| b945552b7c | |||
| 5347b85c9f | |||
| fb2401cf9e | |||
| b161315811 | |||
| 0fa2267b86 | |||
| 4bbdddb876 | |||
| bca9c0965e | |||
| dd58b5044e | |||
| c4f081cb68 | |||
| 59aad31459 | |||
| de9db3cb83 | |||
| 24eb5fcda9 | |||
| 556ae6a5cb | |||
| a479d9c1d8 | |||
| b8bb969ee7 | |||
| 7d361e4734 | |||
| dc7c7686a3 | |||
| 94b4977397 | |||
| 7f822e1cb7 | |||
| fb3ec1f38b | |||
| 87505517ee | |||
| 4c5fe84f92 | |||
| 5faa224c81 | |||
| 736da3abef | |||
| 52d90f8d3b | |||
| 7b812de977 | |||
| a4bd2cc263 | |||
| 14038ba8d2 | |||
| eaff59b6b0 | |||
| cb702ca07a | |||
| cb0bfb0dad | |||
| bf46d5c916 | |||
| 59e686c8b9 | |||
| 9e736f2838 | |||
| c2dd3d9c1b | |||
| 42302d3187 | |||
| 20ccabf3ec | |||
| 8f939fa577 | |||
| 2519bcef89 | |||
| 3e3615a859 | |||
| 79e82c8dc9 | |||
| ccd4432e1f | |||
| b3137f5307 | |||
| 2591ed9840 | |||
| b3e89ef570 | |||
| 45b48c5cd6 | |||
| 1eefd834fc | |||
| 4cc6ed97c5 | |||
| bb55d9b3de | |||
| 3972afb865 | |||
| 04a013cc1b | |||
| fb396f7737 | |||
| cf120ff3ff | |||
| 3e4923d52e | |||
| 01793088f0 | |||
| e2bf2ec2cc | |||
| 4dfbe28709 | |||
| b2021a7191 | |||
| 81e5fb0c18 | |||
| a2a2d940a8 | |||
| c034930219 | |||
| da3dc51d87 | |||
| d217a39513 | |||
| 7729a9317c | |||
| be5f5dd3f0 | |||
| bed8d5da4b | |||
| 4f70f84e80 | |||
| 97b8551866 | |||
| 9a0b67e700 | |||
| 97e4c89cec | |||
| 65aedde8f7 | |||
| 17450f23bf | |||
| ab3ad6b7fd | |||
| 45bc3cbd41 | |||
| 9c1bcac6af | |||
| 0a133265c5 | |||
| 57f25a97c9 | |||
| 8f32242787 | |||
| c4bb19051d | |||
| 10f4fae711 | |||
| 2d9eab3f60 | |||
| fa66195619 | |||
| 134eb126b6 | |||
| f5a6136a58 | |||
| 1a82dfcd61 | |||
| 61fc1dc1fb | |||
| 1f921cc18e | |||
| 2f94ee3f1f | |||
| 154fba12e0 | |||
| 0d18c1d797 | |||
| e905dd52d8 | |||
| 245126a1c3 | |||
| 15d84d30ba | |||
@@ -1,36 +0,0 @@
-[bumpversion]
-current_version = 2025.4.1
-tag = True
-commit = True
-parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))?
-serialize =
-	{major}.{minor}.{patch}-{rc_t}{rc_n}
-	{major}.{minor}.{patch}
-message = release: {new_version}
-tag_name = version/{new_version}
-
-[bumpversion:part:rc_t]
-values =
-	rc
-	final
-optional_value = final
-
-[bumpversion:file:pyproject.toml]
-
-[bumpversion:file:uv.lock]
-
-[bumpversion:file:package.json]
-
-[bumpversion:file:docker-compose.yml]
-
-[bumpversion:file:schema.yml]
-
-[bumpversion:file:blueprints/schema.json]
-
-[bumpversion:file:authentik/__init__.py]
-
-[bumpversion:file:internal/constants/constants.go]
-
-[bumpversion:file:web/src/common/constants.ts]
-
-[bumpversion:file:lifecycle/aws/template.yaml]

@@ -1,13 +1,9 @@
 """Helper script to get the actual branch name, docker safe"""
 
-import configparser
 import os
+from importlib.metadata import version as package_version
 from json import dumps
 from time import time
 
-parser = configparser.ConfigParser()
-parser.read(".bumpversion.cfg")
-
 # Decide if we should push the image or not
 should_push = True
 if len(os.environ.get("DOCKER_USERNAME", "")) < 1:
@@ -31,7 +27,7 @@ is_release = "dev" not in image_names[0]
 sha = os.environ["GITHUB_SHA"] if not is_pull_request else os.getenv("PR_HEAD_SHA")
 
 # 2042.1.0 or 2042.1.0-rc1
-version = parser.get("bumpversion", "current_version")
+version = package_version("authentik")
 # 2042.1
 version_family = ".".join(version.split("-", 1)[0].split(".")[:-1])
 prerelease = "-" in version
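As a side note, the version-family computation in this script can be checked with a short snippet (values below are illustrative; in CI the version would come from the installed `authentik` distribution via `package_version("authentik")`):

```python
from importlib.metadata import version as package_version  # used as in the script above

# Illustrative value; in CI this would be package_version("authentik")
version = "2042.1.0-rc1"
# Strip any "-rcN" suffix, then drop the patch component to get the family
version_family = ".".join(version.split("-", 1)[0].split(".")[:-1])
prerelease = "-" in version
print(version_family, prerelease)  # -> 2042.1 True
```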
|  | |||||||
							
								
								
									
										2
									
								
								.github/actions/setup/action.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										2
									
								
								.github/actions/setup/action.yml
									
									
									
									
										vendored
									
									
								
@@ -36,7 +36,7 @@ runs:
       with:
         go-version-file: "go.mod"
     - name: Setup docker cache
-      uses: ScribeMD/docker-cache@0.5.0
+      uses: AndreKurait/docker-cache@0fe76702a40db986d9663c24954fc14c6a6031b7
       with:
         key: docker-images-${{ runner.os }}-${{ hashFiles('.github/actions/setup/docker-compose.yml', 'Makefile') }}-${{ inputs.postgresql_version }}
     - name: Setup dependencies

@@ -94,7 +94,7 @@ RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
     /bin/sh -c "GEOIPUPDATE_LICENSE_KEY_FILE=/run/secrets/GEOIPUPDATE_LICENSE_KEY /usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0"
 
 # Stage 5: Download uv
-FROM ghcr.io/astral-sh/uv:0.7.8 AS uv
+FROM ghcr.io/astral-sh/uv:0.7.11 AS uv
 # Stage 6: Base python image
 FROM ghcr.io/goauthentik/fips-python:3.13.3-slim-bookworm-fips AS python-base
 
|  | |||||||
							
								
								
									
										14
									
								
								Makefile
									
									
									
									
									
								
							
							
						
						
									
										14
									
								
								Makefile
									
									
									
									
									
								
@@ -57,7 +57,7 @@ migrate: ## Run the Authentik Django server's migrations
 i18n-extract: core-i18n-extract web-i18n-extract  ## Extract strings that require translation into files to send to a translation service
 
 aws-cfn:
-	cd lifecycle/aws && npm run aws-cfn
+	cd lifecycle/aws && npm i && npm run aws-cfn
 
 run:  ## Run the main authentik server process
 	uv run ak server
@@ -86,6 +86,15 @@ dev-create-db:
 
 dev-reset: dev-drop-db dev-create-db migrate  ## Drop and restore the Authentik PostgreSQL instance to a "fresh install" state.
 
+bump:
+	uv version $(version)
+	$(MAKE) gen-build
+	$(MAKE) gen-compose
+	$(MAKE) aws-cfn
+	npm version --no-git-tag-version --allow-same-version $(version)
+	cd ${PWD}/web && npm version --no-git-tag-version --allow-same-version $(version)
+	echo $(version) > ${PWD}/internal/constants/VERSION
+
 #########################
 ## API Schema
 #########################
@@ -100,6 +109,9 @@ gen-build:  ## Extract the schema from the database
 		AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \
 		uv run ak spectacular --file schema.yml
 
+gen-compose:
+	uv run scripts/generate_docker_compose.py
+
 gen-changelog:  ## (Release) generate the changelog based from the commits since the last tag
 	git log --pretty=format:" - %s" $(shell git describe --tags $(shell git rev-list --tags --max-count=1))...$(shell git branch --show-current) | sort > changelog.md
 	npx prettier --write changelog.md

@@ -20,8 +20,8 @@ Even if the issue is not a CVE, we still greatly appreciate your help in hardening
 
 | Version   | Supported |
 | --------- | --------- |
-| 2025.2.x  | ✅        |
 | 2025.4.x  | ✅        |
+| 2025.6.x  | ✅        |
 
 ## Reporting a Vulnerability
 
@@ -1,20 +1,28 @@
 """authentik root module"""
 
+from functools import lru_cache
+from importlib.metadata import version
 from os import environ
 
-__version__ = "2025.4.1"
 ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"
 
 
-def get_build_hash(fallback: str | None = None) -> str:
+@lru_cache
+def authentik_version() -> str:
+    return version("authentik")
+
+
+@lru_cache
+def authentik_build_hash(fallback: str | None = None) -> str:
     """Get build hash"""
     build_hash = environ.get(ENV_GIT_HASH_KEY, fallback if fallback else "")
     return fallback if build_hash == "" and fallback else build_hash
 
 
-def get_full_version() -> str:
+@lru_cache
+def authentik_full_version() -> str:
     """Get full version, with build hash appended"""
-    version = __version__
-    if (build_hash := get_build_hash()) != "":
+    version = authentik_version()
+    if (build_hash := authentik_build_hash()) != "":
         return f"{version}+{build_hash}"
     return version

@@ -16,7 +16,7 @@ from rest_framework.request import Request
 from rest_framework.response import Response
 from rest_framework.views import APIView
 
-from authentik import get_full_version
+from authentik import authentik_full_version
 from authentik.core.api.utils import PassiveSerializer
 from authentik.enterprise.license import LicenseKey
 from authentik.lib.config import CONFIG
@@ -78,7 +78,7 @@ class SystemInfoSerializer(PassiveSerializer):
         """Get versions"""
         return {
             "architecture": platform.machine(),
-            "authentik_version": get_full_version(),
+            "authentik_version": authentik_full_version(),
             "environment": get_env(),
             "openssl_fips_enabled": (
                 backend._fips_enabled if LicenseKey.get_total().status().is_valid else None

@@ -1,6 +1,7 @@
 """authentik administration overview"""
 
 from django.core.cache import cache
+from django_tenants.utils import get_public_schema_name
 from drf_spectacular.utils import extend_schema
 from packaging.version import parse
 from rest_framework.fields import SerializerMethodField
@@ -9,10 +10,11 @@ from rest_framework.request import Request
 from rest_framework.response import Response
 from rest_framework.views import APIView
 
-from authentik import __version__, get_build_hash
+from authentik import authentik_build_hash, authentik_version
 from authentik.admin.tasks import VERSION_CACHE_KEY, VERSION_NULL, update_latest_version
 from authentik.core.api.utils import PassiveSerializer
 from authentik.outposts.models import Outpost
+from authentik.tenants.utils import get_current_tenant
 
 
 class VersionSerializer(PassiveSerializer):
@@ -27,18 +29,20 @@ class VersionSerializer(PassiveSerializer):
 
     def get_build_hash(self, _) -> str:
         """Get build hash, if version is not latest or released"""
-        return get_build_hash()
+        return authentik_build_hash()
 
     def get_version_current(self, _) -> str:
         """Get current version"""
-        return __version__
+        return authentik_version()
 
     def get_version_latest(self, _) -> str:
         """Get latest version from cache"""
+        if get_current_tenant().schema_name == get_public_schema_name():
+            return __version__
         version_in_cache = cache.get(VERSION_CACHE_KEY)
         if not version_in_cache:  # pragma: no cover
             update_latest_version.delay()
-            return __version__
+            return authentik_version()
         return version_in_cache
 
     def get_version_latest_valid(self, _) -> bool:

@@ -10,7 +10,7 @@ from rest_framework.request import Request
 from rest_framework.response import Response
 from rest_framework.views import APIView
 
-from authentik import get_full_version
+from authentik import authentik_full_version
 from authentik.rbac.permissions import HasPermission
 from authentik.root.celery import CELERY_APP
 
@@ -34,7 +34,7 @@ class WorkerView(APIView):
     def get(self, request: Request) -> Response:
         """Get currently connected worker count."""
         raw: list[dict[str, dict]] = CELERY_APP.control.ping(timeout=0.5)
-        our_version = parse(get_full_version())
+        our_version = parse(authentik_full_version())
         response = []
         for worker in raw:
             key = list(worker.keys())[0]
@@ -50,7 +50,7 @@ class WorkerView(APIView):
             response.append(
                 {
                     "worker_id": f"authentik-debug@{gethostname()}",
-                    "version": get_full_version(),
+                    "version": authentik_full_version(),
                     "version_matching": True,
                 }
             )

@@ -14,3 +14,19 @@ class AuthentikAdminConfig(ManagedAppConfig):
     label = "authentik_admin"
     verbose_name = "authentik Admin"
     default = True
+
+    @ManagedAppConfig.reconcile_global
+    def clear_update_notifications(self):
+        """Clear update notifications on startup if the notification was for the version
+        we're running now."""
+        from packaging.version import parse
+
+        from authentik.admin.tasks import LOCAL_VERSION
+        from authentik.events.models import EventAction, Notification
+
+        for notification in Notification.objects.filter(event__action=EventAction.UPDATE_AVAILABLE):
+            if "new_version" not in notification.event.context:
+                continue
+            notification_version = notification.event.context["new_version"]
+            if LOCAL_VERSION >= parse(notification_version):
+                notification.delete()

@@ -1,6 +1,7 @@
 """authentik admin settings"""
 
 from celery.schedules import crontab
+from django_tenants.utils import get_public_schema_name
 
 from authentik.lib.utils.time import fqdn_rand
 
@@ -8,6 +9,7 @@ CELERY_BEAT_SCHEDULE = {
     "admin_latest_version": {
         "task": "authentik.admin.tasks.update_latest_version",
         "schedule": crontab(minute=fqdn_rand("admin_latest_version"), hour="*"),
+        "tenant_schemas": [get_public_schema_name()],
         "options": {"queue": "authentik_scheduled"},
     }
 }

@@ -4,7 +4,7 @@ from django.dispatch import receiver
 from packaging.version import parse
 from prometheus_client import Gauge
 
-from authentik import get_full_version
+from authentik import authentik_full_version
 from authentik.root.celery import CELERY_APP
 from authentik.root.monitoring import monitoring_set
 
@@ -15,7 +15,7 @@ GAUGE_WORKERS = Gauge(
 )
 
 
-_version = parse(get_full_version())
+_version = parse(authentik_full_version())
 
 
 @receiver(monitoring_set)

@@ -1,15 +1,14 @@
 """authentik admin tasks"""
 
 from django.core.cache import cache
-from django.db import DatabaseError, InternalError, ProgrammingError
 from django.utils.translation import gettext_lazy as _
 from packaging.version import parse
 from requests import RequestException
 from structlog.stdlib import get_logger
 
-from authentik import __version__, get_build_hash
+from authentik import authentik_build_hash, authentik_version
 from authentik.admin.apps import PROM_INFO
-from authentik.events.models import Event, EventAction, Notification
+from authentik.events.models import Event, EventAction
 from authentik.events.system_tasks import SystemTask, TaskStatus, prefill_task
 from authentik.lib.config import CONFIG
 from authentik.lib.utils.http import get_http_session
@@ -19,34 +18,20 @@ LOGGER = get_logger()
 VERSION_NULL = "0.0.0"
 VERSION_CACHE_KEY = "authentik_latest_version"
 VERSION_CACHE_TIMEOUT = 8 * 60 * 60  # 8 hours
-LOCAL_VERSION = parse(__version__)
+LOCAL_VERSION = parse(authentik_version())
 
 
 def _set_prom_info():
     """Set prometheus info for version"""
     PROM_INFO.info(
         {
-            "version": __version__,
+            "version": authentik_version(),
             "latest": cache.get(VERSION_CACHE_KEY, ""),
-            "build_hash": get_build_hash(),
+            "build_hash": authentik_build_hash(),
         }
     )
 
 
-@CELERY_APP.task(
-    throws=(DatabaseError, ProgrammingError, InternalError),
-)
-def clear_update_notifications():
-    """Clear update notifications on startup if the notification was for the version
-    we're running now."""
-    for notification in Notification.objects.filter(event__action=EventAction.UPDATE_AVAILABLE):
-        if "new_version" not in notification.event.context:
-            continue
-        notification_version = notification.event.context["new_version"]
-        if LOCAL_VERSION >= parse(notification_version):
-            notification.delete()
-
-
 @CELERY_APP.task(bind=True, base=SystemTask)
 @prefill_task
 def update_latest_version(self: SystemTask):

@@ -5,7 +5,7 @@ from json import loads
 from django.test import TestCase
 from django.urls import reverse
 
-from authentik import __version__
+from authentik import authentik_version
 from authentik.blueprints.tests import reconcile_app
 from authentik.core.models import Group, User
 from authentik.lib.generators import generate_id
@@ -27,7 +27,7 @@ class TestAdminAPI(TestCase):
         response = self.client.get(reverse("authentik_api:admin_version"))
         self.assertEqual(response.status_code, 200)
         body = loads(response.content)
-        self.assertEqual(body["version_current"], __version__)
+        self.assertEqual(body["version_current"], authentik_version())
 
     def test_workers(self):
         """Test Workers API"""

@@ -1,12 +1,12 @@
 """test admin tasks"""
 
+from django.apps import apps
 from django.core.cache import cache
 from django.test import TestCase
 from requests_mock import Mocker
 
 from authentik.admin.tasks import (
     VERSION_CACHE_KEY,
-    clear_update_notifications,
     update_latest_version,
 )
 from authentik.events.models import Event, EventAction
@@ -72,12 +72,13 @@ class TestAdminTasks(TestCase):
 
     def test_clear_update_notifications(self):
         """Test clear of previous notification"""
+        admin_config = apps.get_app_config("authentik_admin")
         Event.objects.create(
             action=EventAction.UPDATE_AVAILABLE, context={"new_version": "99999999.9999999.9999999"}
         )
         Event.objects.create(action=EventAction.UPDATE_AVAILABLE, context={"new_version": "1.1.1"})
         Event.objects.create(action=EventAction.UPDATE_AVAILABLE, context={})
-        clear_update_notifications()
+        admin_config.clear_update_notifications()
         self.assertFalse(
             Event.objects.filter(
                 action=EventAction.UPDATE_AVAILABLE, context__new_version="1.1"

@@ -1,12 +1,13 @@
 """authentik API AppConfig"""
 
-from django.apps import AppConfig
+from authentik.blueprints.apps import ManagedAppConfig
 
 
-class AuthentikAPIConfig(AppConfig):
+class AuthentikAPIConfig(ManagedAppConfig):
     """authentik API Config"""
 
     name = "authentik.api"
     label = "authentik_api"
     mountpoint = "api/"
     verbose_name = "authentik API"
+    default = True

@@ -11,7 +11,7 @@ from rest_framework.relations import PrimaryKeyRelatedField
 from rest_framework.serializers import Serializer
 from structlog.stdlib import get_logger
 
-from authentik import __version__
+from authentik import authentik_version
 from authentik.blueprints.v1.common import BlueprintEntryDesiredState
 from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT, is_model_allowed
 from authentik.blueprints.v1.meta.registry import BaseMetaModel, registry
@@ -48,7 +48,7 @@ class Command(BaseCommand):
             "$schema": "http://json-schema.org/draft-07/schema",
             "$id": "https://goauthentik.io/blueprints/schema.json",
             "type": "object",
-            "title": f"authentik {__version__} Blueprint schema",
+            "title": f"authentik {authentik_version()} Blueprint schema",
             "required": ["version", "entries"],
             "properties": {
                 "version": {
|  | |||||||
							
								
								
									
										14
									
								
								authentik/blueprints/tests/test_managed_app_config.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										14
									
								
								authentik/blueprints/tests/test_managed_app_config.py
									
									
									
									
									
										Normal file
									
								
@@ -0,0 +1,14 @@
+from django.test import TestCase
+
+from authentik.blueprints.apps import ManagedAppConfig
+from authentik.enterprise.apps import EnterpriseConfig
+from authentik.lib.utils.reflection import get_apps
+
+
+class TestManagedAppConfig(TestCase):
+    def test_apps_use_managed_app_config(self):
+        for app in get_apps():
+            if app.name.startswith("authentik.enterprise"):
+                self.assertIn(EnterpriseConfig, app.__class__.__bases__)
+            else:
+                self.assertIn(ManagedAppConfig, app.__class__.__bases__)

@@ -1,9 +1,9 @@
 """authentik brands app"""
 
-from django.apps import AppConfig
+from authentik.blueprints.apps import ManagedAppConfig
 
 
-class AuthentikBrandsConfig(AppConfig):
+class AuthentikBrandsConfig(ManagedAppConfig):
     """authentik Brand app"""
 
     name = "authentik.brands"
@@ -12,3 +12,4 @@ class AuthentikBrandsConfig(AppConfig):
     mountpoints = {
         "authentik.brands.urls_root": "",
     }
+    default = True

@@ -6,7 +6,7 @@ from django.db.models import F, Q
 from django.db.models import Value as V
 from django.http.request import HttpRequest
 
-from authentik import get_full_version
+from authentik import authentik_full_version
 from authentik.brands.models import Brand
 from authentik.lib.sentry import get_http_meta
 from authentik.tenants.models import Tenant
@@ -36,5 +36,5 @@ def context_processor(request: HttpRequest) -> dict[str, Any]:
         "brand": brand,
         "footer_links": tenant.footer_links,
         "html_meta": {**get_http_meta()},
-        "version": get_full_version(),
+        "version": authentik_full_version(),
     }

@@ -84,6 +84,7 @@ from authentik.flows.views.executor import QS_KEY_TOKEN
 from authentik.lib.avatars import get_avatar
 from authentik.rbac.decorators import permission_required
 from authentik.rbac.models import get_permission_choices
+from authentik.stages.email.flow import pickle_flow_token_for_email
 from authentik.stages.email.models import EmailStage
 from authentik.stages.email.tasks import send_mails
 from authentik.stages.email.utils import TemplateEmailMessage
@@ -451,7 +452,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
     def list(self, request, *args, **kwargs):
         return super().list(request, *args, **kwargs)
 
-    def _create_recovery_link(self) -> tuple[str, Token]:
+    def _create_recovery_link(self, for_email=False) -> tuple[str, Token]:
         """Create a recovery link (when the current brand has a recovery flow set),
         that can either be shown to an admin or sent to the user directly"""
         brand: Brand = self.request._request.brand
@@ -473,12 +474,16 @@ class UserViewSet(UsedByMixin, ModelViewSet):
             raise ValidationError(
                 {"non_field_errors": "Recovery flow not applicable to user"}
             ) from None
+        _plan = FlowToken.pickle(plan)
+        if for_email:
+            _plan = pickle_flow_token_for_email(plan)
         token, __ = FlowToken.objects.update_or_create(
             identifier=f"{user.uid}-password-reset",
             defaults={
                 "user": user,
                 "flow": flow,
-                "_plan": FlowToken.pickle(plan),
+                "_plan": _plan,
+                "revoke_on_execution": not for_email,
             },
         )
         querystring = urlencode({QS_KEY_TOKEN: token.key})
@@ -648,7 +653,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
         if for_user.email == "":
             LOGGER.debug("User doesn't have an email address")
             raise ValidationError({"non_field_errors": "User does not have an email address set."})
-        link, token = self._create_recovery_link()
+        link, token = self._create_recovery_link(for_email=True)
         # Lookup the email stage to assure the current user can access it
         stages = get_objects_for_user(
             request.user, "authentik_stages_email.view_emailstage"

@@ -11,7 +11,7 @@ from django.core.management.base import BaseCommand
 from django.db.models import Model
 from django.db.models.signals import post_save, pre_delete
 
-from authentik import get_full_version
+from authentik import authentik_full_version
 from authentik.core.models import User
 from authentik.events.middleware import should_log_model
 from authentik.events.models import Event, EventAction
@@ -19,7 +19,7 @@ from authentik.events.utils import model_to_dict
 
 
 def get_banner_text(shell_type="shell") -> str:
-    return f"""### authentik {shell_type} ({get_full_version()})
+    return f"""### authentik {shell_type} ({authentik_full_version()})
 ### Node {platform.node()} | Arch {platform.machine()} | Python {platform.python_version()} """
 
 

@@ -79,6 +79,7 @@ def _migrate_session(
         AuthenticatedSession.objects.using(db_alias).create(
             session=session,
             user=old_auth_session.user,
+            uuid=old_auth_session.uuid,
         )
 
 

@@ -1,10 +1,81 @@
 # Generated by Django 5.1.9 on 2025-05-14 11:15
 
-from django.apps.registry import Apps
+from django.apps.registry import Apps, apps as global_apps
 from django.db import migrations
+from django.contrib.contenttypes.management import create_contenttypes
+from django.contrib.auth.management import create_permissions
 from django.db.backends.base.schema import BaseDatabaseSchemaEditor
 
 
+def migrate_authenticated_session_permissions(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
+    """Migrate permissions from OldAuthenticatedSession to AuthenticatedSession"""
+    db_alias = schema_editor.connection.alias
+
+    # `apps` here is just an instance of `django.db.migrations.state.AppConfigStub`, we need the
+    # real config for creating permissions and content types
+    authentik_core_config = global_apps.get_app_config("authentik_core")
+    # These are only ran by django after all migrations, but we need them right now.
+    # `global_apps` is needed,
+    create_permissions(authentik_core_config, using=db_alias, verbosity=1)
+    create_contenttypes(authentik_core_config, using=db_alias, verbosity=1)
+
+    # But from now on, this is just a regular migration, so use `apps`
+    Permission = apps.get_model("auth", "Permission")
+    ContentType = apps.get_model("contenttypes", "ContentType")
+
+    try:
+        old_ct = ContentType.objects.using(db_alias).get(
+            app_label="authentik_core", model="oldauthenticatedsession"
+        )
+        new_ct = ContentType.objects.using(db_alias).get(
+            app_label="authentik_core", model="authenticatedsession"
+        )
+    except ContentType.DoesNotExist:
+        # This should exist at this point, but if not, let's cut our losses
+        return
+
+    # Get all permissions for the old content type
+    old_perms = Permission.objects.using(db_alias).filter(content_type=old_ct)
+
+    # Create equivalent permissions for the new content type
+    for old_perm in old_perms:
+        new_perm = (
+            Permission.objects.using(db_alias)
+            .filter(
+                content_type=new_ct,
+                codename=old_perm.codename,
+            )
+            .first()
+        )
+        if not new_perm:
+            # This should exist at this point, but if not, let's cut our losses
+            continue
+
+        # Global user permissions
+        User = apps.get_model("authentik_core", "User")
+        User.user_permissions.through.objects.using(db_alias).filter(
+            permission=old_perm
+        ).all().update(permission=new_perm)
+
+        # Global role permissions
+        DjangoGroup = apps.get_model("auth", "Group")
+        DjangoGroup.permissions.through.objects.using(db_alias).filter(
+            permission=old_perm
+        ).all().update(permission=new_perm)
+
+        # Object user permissions
+        UserObjectPermission = apps.get_model("guardian", "UserObjectPermission")
+        UserObjectPermission.objects.using(db_alias).filter(permission=old_perm).all().update(
+            permission=new_perm, content_type=new_ct
+        )
+
+        # Object role permissions
+        GroupObjectPermission = apps.get_model("guardian", "GroupObjectPermission")
+        GroupObjectPermission.objects.using(db_alias).filter(permission=old_perm).all().update(
+            permission=new_perm, content_type=new_ct
+        )
+
+
 def remove_old_authenticated_session_content_type(
     apps: Apps, schema_editor: BaseDatabaseSchemaEditor
 ):
@@ -21,7 +92,12 @@ class Migration(migrations.Migration):
     ]
 
     operations = [
+        migrations.RunPython(
+            code=migrate_authenticated_session_permissions,
+            reverse_code=migrations.RunPython.noop,
+        ),
         migrations.RunPython(
             code=remove_old_authenticated_session_content_type,
+            reverse_code=migrations.RunPython.noop,
         ),
     ]

@@ -3,7 +3,7 @@
 from django import template
 from django.templatetags.static import static as static_loader
 
-from authentik import get_full_version
+from authentik import authentik_full_version
 
 register = template.Library()
 
@@ -11,4 +11,4 @@ register = template.Library()
 @register.simple_tag()
 def versioned_script(path: str) -> str:
     """Wrapper around {% static %} tag that supports setting the version"""
-    return static_loader(path.replace("%v", get_full_version()))
+    return static_loader(path.replace("%v", authentik_full_version()))

@@ -10,7 +10,7 @@ from django.utils.translation import gettext as _
 from django.views.generic.base import RedirectView, TemplateView
 from rest_framework.request import Request
 
-from authentik import get_build_hash
+from authentik import authentik_build_hash
 from authentik.admin.tasks import LOCAL_VERSION
 from authentik.api.v3.config import ConfigView
 from authentik.brands.api import CurrentBrandSerializer
@@ -50,7 +50,7 @@ class InterfaceView(TemplateView):
         kwargs["brand_json"] = dumps(CurrentBrandSerializer(self.request.brand).data)
         kwargs["version_family"] = f"{LOCAL_VERSION.major}.{LOCAL_VERSION.minor}"
         kwargs["version_subdomain"] = f"version-{LOCAL_VERSION.major}-{LOCAL_VERSION.minor}"
-        kwargs["build"] = get_build_hash()
+        kwargs["build"] = authentik_build_hash()
         kwargs["url_kwargs"] = self.kwargs
         kwargs["base_url"] = self.request.build_absolute_uri(CONFIG.get("web.path", "/"))
         kwargs["base_url_rel"] = CONFIG.get("web.path", "/")

@@ -12,7 +12,7 @@ from cryptography.x509.oid import NameOID
 from django.db import models
 from django.utils.translation import gettext_lazy as _
 
-from authentik import __version__
+from authentik import authentik_version
 from authentik.crypto.models import CertificateKeyPair
 
 
@@ -85,7 +85,7 @@ class CertificateBuilder:
             .issuer_name(
                 x509.Name(
                     [
-                        x509.NameAttribute(NameOID.COMMON_NAME, f"authentik {__version__}"),
+                        x509.NameAttribute(NameOID.COMMON_NAME, f"authentik {authentik_version()}"),
                     ]
                 )
             )

@@ -24,7 +24,7 @@ from requests import RequestException
 from rest_framework.serializers import Serializer
 from structlog.stdlib import get_logger
 
-from authentik import get_full_version
+from authentik import authentik_full_version
 from authentik.brands.models import Brand
 from authentik.brands.utils import DEFAULT_BRAND
 from authentik.core.middleware import (
@@ -473,7 +473,7 @@ class NotificationTransport(SerializerModel):
                     "title": notification.body,
                     "color": "#fd4b2d",
                     "fields": fields,
-                    "footer": f"authentik {get_full_version()}",
+                    "footer": f"authentik {authentik_full_version()}",
                 }
             ],
         }

@@ -7,7 +7,7 @@ from django.core.mail.backends.locmem import EmailBackend
 from django.test import TestCase
 from requests_mock import Mocker
 
-from authentik import get_full_version
+from authentik import authentik_full_version
 from authentik.core.tests.utils import create_test_admin_user
 from authentik.events.models import (
     Event,
@@ -118,7 +118,7 @@ class TestEventTransports(TestCase):
                                 {"short": True, "title": "Event user", "value": self.user.username},
                                 {"title": "foo", "value": "bar,"},
                             ],
-                            "footer": f"authentik {get_full_version()}",
+                            "footer": f"authentik {authentik_full_version()}",
                         }
                     ],
                 },

@@ -10,7 +10,7 @@ from django.core.management.base import BaseCommand
 from django.test import RequestFactory
 from structlog.stdlib import get_logger
 
-from authentik import __version__
+from authentik import authentik_version
 from authentik.core.tests.utils import create_test_admin_user
 from authentik.flows.models import Flow
 from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER, FlowPlanner
@@ -99,7 +99,7 @@ class Command(BaseCommand):
         total_min: int = min(min(inner) for inner in values)
         total_avg = sum(sum(inner) for inner in values) / sum(len(inner) for inner in values)
 
-        print(f"Version: {__version__}")
+        print(f"Version: {authentik_version()}")
         print(f"Processes: {len(values)}")
         print(f"\tMax: {total_max * 100}ms")
         print(f"\tMin: {total_min * 100}ms")

@@ -0,0 +1,18 @@
+# Generated by Django 5.1.9 on 2025-05-27 12:52
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("authentik_flows", "0027_auto_20231028_1424"),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name="flowtoken",
+            name="revoke_on_execution",
+            field=models.BooleanField(default=True),
+        ),
+    ]

| @ -303,9 +303,10 @@ class FlowToken(Token): | |||||||
|  |  | ||||||
|     flow = models.ForeignKey(Flow, on_delete=models.CASCADE) |     flow = models.ForeignKey(Flow, on_delete=models.CASCADE) | ||||||
|     _plan = models.TextField() |     _plan = models.TextField() | ||||||
|  |     revoke_on_execution = models.BooleanField(default=True) | ||||||
|  |  | ||||||
|     @staticmethod |     @staticmethod | ||||||
|     def pickle(plan) -> str: |     def pickle(plan: "FlowPlan") -> str: | ||||||
|         """Pickle into string""" |         """Pickle into string""" | ||||||
|         data = dumps(plan) |         data = dumps(plan) | ||||||
|         return b64encode(data).decode() |         return b64encode(data).decode() | ||||||
|  | |||||||
| @ -99,9 +99,10 @@ class ChallengeStageView(StageView): | |||||||
|             self.logger.debug("Got StageInvalidException", exc=exc) |             self.logger.debug("Got StageInvalidException", exc=exc) | ||||||
|             return self.executor.stage_invalid() |             return self.executor.stage_invalid() | ||||||
|         if not challenge.is_valid(): |         if not challenge.is_valid(): | ||||||
|             self.logger.warning( |             self.logger.error( | ||||||
|                 "f(ch): Invalid challenge", |                 "f(ch): Invalid challenge", | ||||||
|                 errors=challenge.errors, |                 errors=challenge.errors, | ||||||
|  |                 challenge=challenge.data, | ||||||
|             ) |             ) | ||||||
|         return HttpChallengeResponse(challenge) |         return HttpChallengeResponse(challenge) | ||||||
|  |  | ||||||
|  | |||||||
| @ -146,6 +146,7 @@ class FlowExecutorView(APIView): | |||||||
|         except (AttributeError, EOFError, ImportError, IndexError) as exc: |         except (AttributeError, EOFError, ImportError, IndexError) as exc: | ||||||
|             LOGGER.warning("f(exec): Failed to restore token plan", exc=exc) |             LOGGER.warning("f(exec): Failed to restore token plan", exc=exc) | ||||||
|         finally: |         finally: | ||||||
|  |             if token.revoke_on_execution: | ||||||
|                 token.delete() |                 token.delete() | ||||||
|         if not isinstance(plan, FlowPlan): |         if not isinstance(plan, FlowPlan): | ||||||
|             return None |             return None | ||||||
|  | |||||||
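The migration and model hunks above add a `revoke_on_execution` flag to `FlowToken`, and the executor hunk just above makes token deletion conditional on it, so a token created with `revoke_on_execution=False` can restore its pickled plan more than once. A hedged usage sketch; only `flow`, `_plan`, and `revoke_on_execution` come from the hunks, while the remaining constructor fields are assumed to be inherited from the base `Token` model:

```python
# Hedged sketch: creating a reusable flow token.
# Given from the surrounding executor context: pending_user (User),
# flow (Flow), plan (FlowPlan).
from authentik.flows.models import FlowToken

token = FlowToken.objects.create(
    user=pending_user,                  # assumed base-Token field
    identifier="example-resume-token",  # assumed base-Token field
    flow=flow,                          # ForeignKey shown above
    _plan=FlowToken.pickle(plan),       # base64-pickled FlowPlan, as above
    revoke_on_execution=False,          # new flag: survive the first execution
)
# With revoke_on_execution=False, the executor's finally-block above skips
# token.delete(), so the same token can restore the plan again later.
```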
| @ -81,7 +81,6 @@ debugger: false | |||||||
|  |  | ||||||
| log_level: info | log_level: info | ||||||
|  |  | ||||||
| session_storage: cache |  | ||||||
| sessions: | sessions: | ||||||
|   unauthenticated_age: days=1 |   unauthenticated_age: days=1 | ||||||
|  |  | ||||||
|  | |||||||
| @ -31,7 +31,7 @@ from sentry_sdk.tracing import BAGGAGE_HEADER_NAME, SENTRY_TRACE_HEADER_NAME | |||||||
| from structlog.stdlib import get_logger | from structlog.stdlib import get_logger | ||||||
| from websockets.exceptions import WebSocketException | from websockets.exceptions import WebSocketException | ||||||
|  |  | ||||||
| from authentik import __version__, get_build_hash | from authentik import authentik_build_hash, authentik_version | ||||||
| from authentik.lib.config import CONFIG | from authentik.lib.config import CONFIG | ||||||
| from authentik.lib.utils.http import authentik_user_agent | from authentik.lib.utils.http import authentik_user_agent | ||||||
| from authentik.lib.utils.reflection import get_env | from authentik.lib.utils.reflection import get_env | ||||||
| @ -78,11 +78,11 @@ def sentry_init(**sentry_init_kwargs): | |||||||
|         ], |         ], | ||||||
|         before_send=before_send, |         before_send=before_send, | ||||||
|         traces_sampler=traces_sampler, |         traces_sampler=traces_sampler, | ||||||
|         release=f"authentik@{__version__}", |         release=f"authentik@{authentik_version()}", | ||||||
|         transport=SentryTransport, |         transport=SentryTransport, | ||||||
|         **kwargs, |         **kwargs, | ||||||
|     ) |     ) | ||||||
|     set_tag("authentik.build_hash", get_build_hash("tagged")) |     set_tag("authentik.build_hash", authentik_build_hash("tagged")) | ||||||
|     set_tag("authentik.env", get_env()) |     set_tag("authentik.env", get_env()) | ||||||
|     set_tag("authentik.component", "backend") |     set_tag("authentik.component", "backend") | ||||||
|  |  | ||||||
|  | |||||||
| @ -1,6 +1,7 @@ | |||||||
| from collections.abc import Callable | from collections.abc import Callable | ||||||
| from dataclasses import asdict | from dataclasses import asdict | ||||||
|  |  | ||||||
|  | from celery import group | ||||||
| from celery.exceptions import Retry | from celery.exceptions import Retry | ||||||
| from celery.result import allow_join_result | from celery.result import allow_join_result | ||||||
| from django.core.paginator import Paginator | from django.core.paginator import Paginator | ||||||
| @ -82,21 +83,41 @@ class SyncTasks: | |||||||
|                 self.logger.debug("Failed to acquire sync lock, skipping", provider=provider.name) |                 self.logger.debug("Failed to acquire sync lock, skipping", provider=provider.name) | ||||||
|                 return |                 return | ||||||
|             try: |             try: | ||||||
|                 for page in users_paginator.page_range: |                 messages.append(_("Syncing users")) | ||||||
|                     messages.append(_("Syncing page {page} of users".format(page=page))) |                 user_results = ( | ||||||
|                     for msg in sync_objects.apply_async( |                     group( | ||||||
|  |                         [ | ||||||
|  |                             sync_objects.signature( | ||||||
|                                 args=(class_to_path(User), page, provider_pk), |                                 args=(class_to_path(User), page, provider_pk), | ||||||
|                                 time_limit=PAGE_TIMEOUT, |                                 time_limit=PAGE_TIMEOUT, | ||||||
|                                 soft_time_limit=PAGE_TIMEOUT, |                                 soft_time_limit=PAGE_TIMEOUT, | ||||||
|                     ).get(): |                             ) | ||||||
|  |                             for page in users_paginator.page_range | ||||||
|  |                         ] | ||||||
|  |                     ) | ||||||
|  |                     .apply_async() | ||||||
|  |                     .get() | ||||||
|  |                 ) | ||||||
|  |                 for result in user_results: | ||||||
|  |                     for msg in result: | ||||||
|                         messages.append(LogEvent(**msg)) |                         messages.append(LogEvent(**msg)) | ||||||
|                 for page in groups_paginator.page_range: |                 messages.append(_("Syncing groups")) | ||||||
|                     messages.append(_("Syncing page {page} of groups".format(page=page))) |                 group_results = ( | ||||||
|                     for msg in sync_objects.apply_async( |                     group( | ||||||
|  |                         [ | ||||||
|  |                             sync_objects.signature( | ||||||
|                                 args=(class_to_path(Group), page, provider_pk), |                                 args=(class_to_path(Group), page, provider_pk), | ||||||
|                                 time_limit=PAGE_TIMEOUT, |                                 time_limit=PAGE_TIMEOUT, | ||||||
|                                 soft_time_limit=PAGE_TIMEOUT, |                                 soft_time_limit=PAGE_TIMEOUT, | ||||||
|                     ).get(): |                             ) | ||||||
|  |                             for page in groups_paginator.page_range | ||||||
|  |                         ] | ||||||
|  |                     ) | ||||||
|  |                     .apply_async() | ||||||
|  |                     .get() | ||||||
|  |                 ) | ||||||
|  |                 for result in group_results: | ||||||
|  |                     for msg in result: | ||||||
|                         messages.append(LogEvent(**msg)) |                         messages.append(LogEvent(**msg)) | ||||||
|             except TransientSyncException as exc: |             except TransientSyncException as exc: | ||||||
|                 self.logger.warning("transient sync exception", exc=exc) |                 self.logger.warning("transient sync exception", exc=exc) | ||||||
| @ -109,7 +130,7 @@ class SyncTasks: | |||||||
|     def sync_objects( |     def sync_objects( | ||||||
|         self, object_type: str, page: int, provider_pk: int, override_dry_run=False, **filter |         self, object_type: str, page: int, provider_pk: int, override_dry_run=False, **filter | ||||||
|     ): |     ): | ||||||
|         _object_type = path_to_class(object_type) |         _object_type: type[Model] = path_to_class(object_type) | ||||||
|         self.logger = get_logger().bind( |         self.logger = get_logger().bind( | ||||||
|             provider_type=class_to_path(self._provider_model), |             provider_type=class_to_path(self._provider_model), | ||||||
|             provider_pk=provider_pk, |             provider_pk=provider_pk, | ||||||
| @ -132,6 +153,19 @@ class SyncTasks: | |||||||
|             self.logger.debug("starting discover") |             self.logger.debug("starting discover") | ||||||
|             client.discover() |             client.discover() | ||||||
|         self.logger.debug("starting sync for page", page=page) |         self.logger.debug("starting sync for page", page=page) | ||||||
|  |         messages.append( | ||||||
|  |             asdict( | ||||||
|  |                 LogEvent( | ||||||
|  |                     _( | ||||||
|  |                         "Syncing page {page} of {object_type}".format( | ||||||
|  |                             page=page, object_type=_object_type._meta.verbose_name_plural | ||||||
|  |                         ) | ||||||
|  |                     ), | ||||||
|  |                     log_level="info", | ||||||
|  |                     logger=f"{provider._meta.verbose_name}@{object_type}", | ||||||
|  |                 ) | ||||||
|  |             ) | ||||||
|  |         ) | ||||||
|         for obj in paginator.page(page).object_list: |         for obj in paginator.page(page).object_list: | ||||||
|             obj: Model |             obj: Model | ||||||
|             try: |             try: | ||||||
|  | |||||||
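The `SyncTasks` rewrite above stops looping over paginator pages sequentially and instead builds one `celery.group` of `sync_objects` signatures per object type, so all pages are dispatched at once and their log messages are collected from the group result. A condensed sketch of that pattern, using the same names as the hunk (inside a task, waiting on the result requires `allow_join_result()`, which this module already imports):

```python
# Fan a paginated sync out as a single Celery group (pattern from the hunk
# above). sync_objects is the Celery task taking (object_type, page, provider_pk).
from celery import group
from celery.result import allow_join_result

signatures = [
    sync_objects.signature(
        args=(class_to_path(User), page, provider_pk),
        time_limit=PAGE_TIMEOUT,
        soft_time_limit=PAGE_TIMEOUT,
    )
    for page in users_paginator.page_range
]
with allow_join_result():  # required when calling .get() inside a task
    # apply_async() schedules every page at once; get() returns one list of
    # serialized log messages per page.
    for page_messages in group(signatures).apply_async().get():
        for msg in page_messages:
            messages.append(LogEvent(**msg))
```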
| @ -5,7 +5,7 @@ from uuid import uuid4 | |||||||
| from requests.sessions import PreparedRequest, Session | from requests.sessions import PreparedRequest, Session | ||||||
| from structlog.stdlib import get_logger | from structlog.stdlib import get_logger | ||||||
|  |  | ||||||
| from authentik import get_full_version | from authentik import authentik_full_version | ||||||
| from authentik.lib.config import CONFIG | from authentik.lib.config import CONFIG | ||||||
|  |  | ||||||
| LOGGER = get_logger() | LOGGER = get_logger() | ||||||
| @ -13,7 +13,7 @@ LOGGER = get_logger() | |||||||
|  |  | ||||||
| def authentik_user_agent() -> str: | def authentik_user_agent() -> str: | ||||||
|     """Get a common user agent""" |     """Get a common user agent""" | ||||||
|     return f"authentik@{get_full_version()}" |     return f"authentik@{authentik_full_version()}" | ||||||
|  |  | ||||||
|  |  | ||||||
| class TimeoutSession(Session): | class TimeoutSession(Session): | ||||||
|  | |||||||
| @ -13,7 +13,7 @@ from rest_framework.request import Request | |||||||
| from rest_framework.response import Response | from rest_framework.response import Response | ||||||
| from rest_framework.viewsets import ModelViewSet | from rest_framework.viewsets import ModelViewSet | ||||||
|  |  | ||||||
| from authentik import get_build_hash | from authentik import authentik_build_hash | ||||||
| from authentik.core.api.providers import ProviderSerializer | from authentik.core.api.providers import ProviderSerializer | ||||||
| from authentik.core.api.used_by import UsedByMixin | from authentik.core.api.used_by import UsedByMixin | ||||||
| from authentik.core.api.utils import JSONDictField, ModelSerializer, PassiveSerializer | from authentik.core.api.utils import JSONDictField, ModelSerializer, PassiveSerializer | ||||||
| @ -194,7 +194,7 @@ class OutpostViewSet(UsedByMixin, ModelViewSet): | |||||||
|                     "openssl_version": state.openssl_version, |                     "openssl_version": state.openssl_version, | ||||||
|                     "fips_enabled": state.fips_enabled, |                     "fips_enabled": state.fips_enabled, | ||||||
|                     "hostname": state.hostname, |                     "hostname": state.hostname, | ||||||
|                     "build_hash_should": get_build_hash(), |                     "build_hash_should": authentik_build_hash(), | ||||||
|                 } |                 } | ||||||
|             ) |             ) | ||||||
|         return Response(OutpostHealthSerializer(states, many=True).data) |         return Response(OutpostHealthSerializer(states, many=True).data) | ||||||
|  | |||||||
| @ -4,7 +4,7 @@ from dataclasses import dataclass | |||||||
|  |  | ||||||
| from structlog.stdlib import get_logger | from structlog.stdlib import get_logger | ||||||
|  |  | ||||||
| from authentik import __version__, get_build_hash | from authentik import authentik_build_hash, authentik_version | ||||||
| from authentik.events.logs import LogEvent, capture_logs | from authentik.events.logs import LogEvent, capture_logs | ||||||
| from authentik.lib.config import CONFIG | from authentik.lib.config import CONFIG | ||||||
| from authentik.lib.sentry import SentryIgnoredException | from authentik.lib.sentry import SentryIgnoredException | ||||||
| @ -99,6 +99,6 @@ class BaseController: | |||||||
|         image_name_template: str = CONFIG.get("outposts.container_image_base") |         image_name_template: str = CONFIG.get("outposts.container_image_base") | ||||||
|         return image_name_template % { |         return image_name_template % { | ||||||
|             "type": self.outpost.type, |             "type": self.outpost.type, | ||||||
|             "version": __version__, |             "version": authentik_version(), | ||||||
|             "build_hash": get_build_hash(), |             "build_hash": authentik_build_hash(), | ||||||
|         } |         } | ||||||
|  | |||||||
| @ -13,7 +13,7 @@ from paramiko.ssh_exception import SSHException | |||||||
| from structlog.stdlib import get_logger | from structlog.stdlib import get_logger | ||||||
| from yaml import safe_dump | from yaml import safe_dump | ||||||
|  |  | ||||||
| from authentik import __version__ | from authentik import authentik_version | ||||||
| from authentik.outposts.apps import MANAGED_OUTPOST | from authentik.outposts.apps import MANAGED_OUTPOST | ||||||
| from authentik.outposts.controllers.base import BaseClient, BaseController, ControllerException | from authentik.outposts.controllers.base import BaseClient, BaseController, ControllerException | ||||||
| from authentik.outposts.docker_ssh import DockerInlineSSH, SSHManagedExternallyException | from authentik.outposts.docker_ssh import DockerInlineSSH, SSHManagedExternallyException | ||||||
| @ -185,7 +185,7 @@ class DockerController(BaseController): | |||||||
|         try: |         try: | ||||||
|             self.client.images.pull(image) |             self.client.images.pull(image) | ||||||
|         except DockerException:  # pragma: no cover |         except DockerException:  # pragma: no cover | ||||||
|             image = f"ghcr.io/goauthentik/{self.outpost.type}:{__version__}" |             image = f"ghcr.io/goauthentik/{self.outpost.type}:{authentik_version()}" | ||||||
|             self.client.images.pull(image) |             self.client.images.pull(image) | ||||||
|         return image |         return image | ||||||
|  |  | ||||||
|  | |||||||
| @ -17,7 +17,7 @@ from requests import Response | |||||||
| from structlog.stdlib import get_logger | from structlog.stdlib import get_logger | ||||||
| from urllib3.exceptions import HTTPError | from urllib3.exceptions import HTTPError | ||||||
|  |  | ||||||
| from authentik import __version__ | from authentik import authentik_version | ||||||
| from authentik.outposts.apps import MANAGED_OUTPOST | from authentik.outposts.apps import MANAGED_OUTPOST | ||||||
| from authentik.outposts.controllers.base import ControllerException | from authentik.outposts.controllers.base import ControllerException | ||||||
| from authentik.outposts.controllers.k8s.triggers import NeedsRecreate, NeedsUpdate | from authentik.outposts.controllers.k8s.triggers import NeedsRecreate, NeedsUpdate | ||||||
| @ -29,8 +29,8 @@ T = TypeVar("T", V1Pod, V1Deployment) | |||||||
|  |  | ||||||
|  |  | ||||||
| def get_version() -> str: | def get_version() -> str: | ||||||
|     """Wrapper for __version__ to make testing easier""" |     """Wrapper for authentik_version() to make testing easier""" | ||||||
|     return __version__ |     return authentik_version() | ||||||
|  |  | ||||||
|  |  | ||||||
| class KubernetesObjectReconciler(Generic[T]): | class KubernetesObjectReconciler(Generic[T]): | ||||||
|  | |||||||
| @ -23,7 +23,7 @@ from kubernetes.client import ( | |||||||
|     V1SecurityContext, |     V1SecurityContext, | ||||||
| ) | ) | ||||||
|  |  | ||||||
| from authentik import get_full_version | from authentik import authentik_full_version | ||||||
| from authentik.outposts.controllers.base import FIELD_MANAGER | from authentik.outposts.controllers.base import FIELD_MANAGER | ||||||
| from authentik.outposts.controllers.k8s.base import KubernetesObjectReconciler | from authentik.outposts.controllers.k8s.base import KubernetesObjectReconciler | ||||||
| from authentik.outposts.controllers.k8s.triggers import NeedsUpdate | from authentik.outposts.controllers.k8s.triggers import NeedsUpdate | ||||||
| @ -94,7 +94,7 @@ class DeploymentReconciler(KubernetesObjectReconciler[V1Deployment]): | |||||||
|         meta = self.get_object_meta(name=self.name) |         meta = self.get_object_meta(name=self.name) | ||||||
|         image_name = self.controller.get_container_image() |         image_name = self.controller.get_container_image() | ||||||
|         image_pull_secrets = self.outpost.config.kubernetes_image_pull_secrets |         image_pull_secrets = self.outpost.config.kubernetes_image_pull_secrets | ||||||
|         version = get_full_version().replace("+", "-") |         version = authentik_full_version().replace("+", "-") | ||||||
|         return V1Deployment( |         return V1Deployment( | ||||||
|             metadata=meta, |             metadata=meta, | ||||||
|             spec=V1DeploymentSpec( |             spec=V1DeploymentSpec( | ||||||
|  | |||||||
| @ -19,7 +19,7 @@ from packaging.version import Version, parse | |||||||
| from rest_framework.serializers import Serializer | from rest_framework.serializers import Serializer | ||||||
| from structlog.stdlib import get_logger | from structlog.stdlib import get_logger | ||||||
|  |  | ||||||
| from authentik import __version__, get_build_hash | from authentik import authentik_build_hash, authentik_version | ||||||
| from authentik.blueprints.models import ManagedModel | from authentik.blueprints.models import ManagedModel | ||||||
| from authentik.brands.models import Brand | from authentik.brands.models import Brand | ||||||
| from authentik.core.models import ( | from authentik.core.models import ( | ||||||
| @ -38,7 +38,7 @@ from authentik.lib.sentry import SentryIgnoredException | |||||||
| from authentik.lib.utils.errors import exception_to_string | from authentik.lib.utils.errors import exception_to_string | ||||||
| from authentik.outposts.controllers.k8s.utils import get_namespace | from authentik.outposts.controllers.k8s.utils import get_namespace | ||||||
|  |  | ||||||
| OUR_VERSION = parse(__version__) | OUR_VERSION = parse(authentik_version()) | ||||||
| OUTPOST_HELLO_INTERVAL = 10 | OUTPOST_HELLO_INTERVAL = 10 | ||||||
| LOGGER = get_logger() | LOGGER = get_logger() | ||||||
|  |  | ||||||
| @ -451,7 +451,7 @@ class OutpostState: | |||||||
|         """Check if outpost version matches our version""" |         """Check if outpost version matches our version""" | ||||||
|         if not self.version: |         if not self.version: | ||||||
|             return False |             return False | ||||||
|         if self.build_hash != get_build_hash(): |         if self.build_hash != authentik_build_hash(): | ||||||
|             return False |             return False | ||||||
|         return parse(self.version) != OUR_VERSION |         return parse(self.version) != OUR_VERSION | ||||||
|  |  | ||||||
|  | |||||||
| @ -8,7 +8,7 @@ from channels.testing import WebsocketCommunicator | |||||||
| from django.contrib.contenttypes.models import ContentType | from django.contrib.contenttypes.models import ContentType | ||||||
| from django.test import TransactionTestCase | from django.test import TransactionTestCase | ||||||
|  |  | ||||||
| from authentik import __version__ | from authentik import authentik_version | ||||||
| from authentik.core.tests.utils import create_test_flow | from authentik.core.tests.utils import create_test_flow | ||||||
| from authentik.outposts.consumer import WebsocketMessage, WebsocketMessageInstruction | from authentik.outposts.consumer import WebsocketMessage, WebsocketMessageInstruction | ||||||
| from authentik.outposts.models import Outpost, OutpostType | from authentik.outposts.models import Outpost, OutpostType | ||||||
| @ -73,7 +73,7 @@ class TestOutpostWS(TransactionTestCase): | |||||||
|                 WebsocketMessage( |                 WebsocketMessage( | ||||||
|                     instruction=WebsocketMessageInstruction.HELLO, |                     instruction=WebsocketMessageInstruction.HELLO, | ||||||
|                     args={ |                     args={ | ||||||
|                         "version": __version__, |                         "version": authentik_version(), | ||||||
|                         "buildHash": "foo", |                         "buildHash": "foo", | ||||||
|                         "uuid": "123", |                         "uuid": "123", | ||||||
|                     }, |                     }, | ||||||
|  | |||||||
| @ -1,11 +1,12 @@ | |||||||
| """Authentik policy dummy app config""" | """Authentik policy dummy app config""" | ||||||
|  |  | ||||||
| from django.apps import AppConfig | from authentik.blueprints.apps import ManagedAppConfig | ||||||
|  |  | ||||||
|  |  | ||||||
| class AuthentikPolicyDummyConfig(AppConfig): | class AuthentikPolicyDummyConfig(ManagedAppConfig): | ||||||
|     """Authentik policy_dummy app config""" |     """Authentik policy_dummy app config""" | ||||||
|  |  | ||||||
|     name = "authentik.policies.dummy" |     name = "authentik.policies.dummy" | ||||||
|     label = "authentik_policies_dummy" |     label = "authentik_policies_dummy" | ||||||
|     verbose_name = "authentik Policies.Dummy" |     verbose_name = "authentik Policies.Dummy" | ||||||
|  |     default = True | ||||||
|  | |||||||
| @ -1,11 +1,12 @@ | |||||||
| """authentik Event Matcher policy app config""" | """authentik Event Matcher policy app config""" | ||||||
|  |  | ||||||
| from django.apps import AppConfig | from authentik.blueprints.apps import ManagedAppConfig | ||||||
|  |  | ||||||
|  |  | ||||||
| class AuthentikPoliciesEventMatcherConfig(AppConfig): | class AuthentikPoliciesEventMatcherConfig(ManagedAppConfig): | ||||||
|     """authentik Event Matcher policy app config""" |     """authentik Event Matcher policy app config""" | ||||||
|  |  | ||||||
|     name = "authentik.policies.event_matcher" |     name = "authentik.policies.event_matcher" | ||||||
|     label = "authentik_policies_event_matcher" |     label = "authentik_policies_event_matcher" | ||||||
|     verbose_name = "authentik Policies.Event Matcher" |     verbose_name = "authentik Policies.Event Matcher" | ||||||
|  |     default = True | ||||||
|  | |||||||
| @ -1,11 +1,12 @@ | |||||||
| """Authentik policy_expiry app config""" | """Authentik policy_expiry app config""" | ||||||
|  |  | ||||||
| from django.apps import AppConfig | from authentik.blueprints.apps import ManagedAppConfig | ||||||
|  |  | ||||||
|  |  | ||||||
| class AuthentikPolicyExpiryConfig(AppConfig): | class AuthentikPolicyExpiryConfig(ManagedAppConfig): | ||||||
|     """Authentik policy_expiry app config""" |     """Authentik policy_expiry app config""" | ||||||
|  |  | ||||||
|     name = "authentik.policies.expiry" |     name = "authentik.policies.expiry" | ||||||
|     label = "authentik_policies_expiry" |     label = "authentik_policies_expiry" | ||||||
|     verbose_name = "authentik Policies.Expiry" |     verbose_name = "authentik Policies.Expiry" | ||||||
|  |     default = True | ||||||
|  | |||||||
| @ -1,11 +1,12 @@ | |||||||
| """Authentik policy_expression app config""" | """Authentik policy_expression app config""" | ||||||
|  |  | ||||||
| from django.apps import AppConfig | from authentik.blueprints.apps import ManagedAppConfig | ||||||
|  |  | ||||||
|  |  | ||||||
| class AuthentikPolicyExpressionConfig(AppConfig): | class AuthentikPolicyExpressionConfig(ManagedAppConfig): | ||||||
|     """Authentik policy_expression app config""" |     """Authentik policy_expression app config""" | ||||||
|  |  | ||||||
|     name = "authentik.policies.expression" |     name = "authentik.policies.expression" | ||||||
|     label = "authentik_policies_expression" |     label = "authentik_policies_expression" | ||||||
|     verbose_name = "authentik Policies.Expression" |     verbose_name = "authentik Policies.Expression" | ||||||
|  |     default = True | ||||||
|  | |||||||
| @ -1,11 +1,12 @@ | |||||||
| """Authentik policy geoip app config""" | """Authentik policy geoip app config""" | ||||||
|  |  | ||||||
| from django.apps import AppConfig | from authentik.blueprints.apps import ManagedAppConfig | ||||||
|  |  | ||||||
|  |  | ||||||
| class AuthentikPolicyGeoIPConfig(AppConfig): | class AuthentikPolicyGeoIPConfig(ManagedAppConfig): | ||||||
|     """Authentik policy_geoip app config""" |     """Authentik policy_geoip app config""" | ||||||
|  |  | ||||||
|     name = "authentik.policies.geoip" |     name = "authentik.policies.geoip" | ||||||
|     label = "authentik_policies_geoip" |     label = "authentik_policies_geoip" | ||||||
|     verbose_name = "authentik Policies.GeoIP" |     verbose_name = "authentik Policies.GeoIP" | ||||||
|  |     default = True | ||||||
|  | |||||||
| @ -1,11 +1,12 @@ | |||||||
| """authentik Password policy app config""" | """authentik Password policy app config""" | ||||||
|  |  | ||||||
| from django.apps import AppConfig | from authentik.blueprints.apps import ManagedAppConfig | ||||||
|  |  | ||||||
|  |  | ||||||
| class AuthentikPoliciesPasswordConfig(AppConfig): | class AuthentikPoliciesPasswordConfig(ManagedAppConfig): | ||||||
|     """authentik Password policy app config""" |     """authentik Password policy app config""" | ||||||
|  |  | ||||||
|     name = "authentik.policies.password" |     name = "authentik.policies.password" | ||||||
|     label = "authentik_policies_password" |     label = "authentik_policies_password" | ||||||
|     verbose_name = "authentik Policies.Password" |     verbose_name = "authentik Policies.Password" | ||||||
|  |     default = True | ||||||
|  | |||||||
| @ -1,11 +1,12 @@ | |||||||
| """authentik ldap provider app config""" | """authentik ldap provider app config""" | ||||||
|  |  | ||||||
| from django.apps import AppConfig | from authentik.blueprints.apps import ManagedAppConfig | ||||||
|  |  | ||||||
|  |  | ||||||
| class AuthentikProviderLDAPConfig(AppConfig): | class AuthentikProviderLDAPConfig(ManagedAppConfig): | ||||||
|     """authentik ldap provider app config""" |     """authentik ldap provider app config""" | ||||||
|  |  | ||||||
|     name = "authentik.providers.ldap" |     name = "authentik.providers.ldap" | ||||||
|     label = "authentik_providers_ldap" |     label = "authentik_providers_ldap" | ||||||
|     verbose_name = "authentik Providers.LDAP" |     verbose_name = "authentik Providers.LDAP" | ||||||
|  |     default = True | ||||||
|  | |||||||
| @ -10,3 +10,11 @@ class AuthentikProviderProxyConfig(ManagedAppConfig): | |||||||
|     label = "authentik_providers_proxy" |     label = "authentik_providers_proxy" | ||||||
|     verbose_name = "authentik Providers.Proxy" |     verbose_name = "authentik Providers.Proxy" | ||||||
|     default = True |     default = True | ||||||
|  |  | ||||||
|  |     @ManagedAppConfig.reconcile_tenant | ||||||
|  |     def proxy_set_defaults(self): | ||||||
|  |         from authentik.providers.proxy.models import ProxyProvider | ||||||
|  |  | ||||||
|  |         for provider in ProxyProvider.objects.all(): | ||||||
|  |             provider.set_oauth_defaults() | ||||||
|  |             provider.save() | ||||||
|  | |||||||
| @ -2,25 +2,13 @@ | |||||||
|  |  | ||||||
| from asgiref.sync import async_to_sync | from asgiref.sync import async_to_sync | ||||||
| from channels.layers import get_channel_layer | from channels.layers import get_channel_layer | ||||||
| from django.db import DatabaseError, InternalError, ProgrammingError |  | ||||||
|  |  | ||||||
| from authentik.outposts.consumer import OUTPOST_GROUP | from authentik.outposts.consumer import OUTPOST_GROUP | ||||||
| from authentik.outposts.models import Outpost, OutpostType | from authentik.outposts.models import Outpost, OutpostType | ||||||
| from authentik.providers.oauth2.id_token import hash_session_key | from authentik.providers.oauth2.id_token import hash_session_key | ||||||
| from authentik.providers.proxy.models import ProxyProvider |  | ||||||
| from authentik.root.celery import CELERY_APP | from authentik.root.celery import CELERY_APP | ||||||
|  |  | ||||||
|  |  | ||||||
| @CELERY_APP.task( |  | ||||||
|     throws=(DatabaseError, ProgrammingError, InternalError), |  | ||||||
| ) |  | ||||||
| def proxy_set_defaults(): |  | ||||||
|     """Ensure correct defaults are set for all providers""" |  | ||||||
|     for provider in ProxyProvider.objects.all(): |  | ||||||
|         provider.set_oauth_defaults() |  | ||||||
|         provider.save() |  | ||||||
|  |  | ||||||
|  |  | ||||||
| @CELERY_APP.task() | @CELERY_APP.task() | ||||||
| def proxy_on_logout(session_id: str): | def proxy_on_logout(session_id: str): | ||||||
|     """Update outpost instances connected to a single outpost""" |     """Update outpost instances connected to a single outpost""" | ||||||
|  | |||||||
| @ -166,7 +166,6 @@ class ConnectionToken(ExpiringModel): | |||||||
|         always_merger.merge(settings, default_settings) |         always_merger.merge(settings, default_settings) | ||||||
|         always_merger.merge(settings, self.endpoint.provider.settings) |         always_merger.merge(settings, self.endpoint.provider.settings) | ||||||
|         always_merger.merge(settings, self.endpoint.settings) |         always_merger.merge(settings, self.endpoint.settings) | ||||||
|         always_merger.merge(settings, self.settings) |  | ||||||
|  |  | ||||||
|         def mapping_evaluator(mappings: QuerySet): |         def mapping_evaluator(mappings: QuerySet): | ||||||
|             for mapping in mappings: |             for mapping in mappings: | ||||||
| @ -191,6 +190,7 @@ class ConnectionToken(ExpiringModel): | |||||||
|         mapping_evaluator( |         mapping_evaluator( | ||||||
|             RACPropertyMapping.objects.filter(endpoint__in=[self.endpoint]).order_by("name") |             RACPropertyMapping.objects.filter(endpoint__in=[self.endpoint]).order_by("name") | ||||||
|         ) |         ) | ||||||
|  |         always_merger.merge(settings, self.settings) | ||||||
|  |  | ||||||
|         settings["drive-path"] = f"/tmp/connection/{self.token}"  # nosec |         settings["drive-path"] = f"/tmp/connection/{self.token}"  # nosec | ||||||
|         settings["create-drive-path"] = "true" |         settings["create-drive-path"] = "true" | ||||||
|  | |||||||
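The `ConnectionToken.get_settings` change above moves `always_merger.merge(settings, self.settings)` from before the property-mapping evaluation to after it, so per-token settings now override everything else; the reordered assertions in the test hunk below check exactly that. A small self-contained illustration of why merge order matters with `deepmerge` (the values are made up):

```python
# With deepmerge's always_merger, later merges overwrite earlier ones, so
# merging the token settings last gives them the highest precedence.
from deepmerge import always_merger

settings: dict = {}
always_merger.merge(settings, {"level": "default", "resize-method": "display-update"})
always_merger.merge(settings, {"level": "provider", "port": "1324"})
always_merger.merge(settings, {"level": "mapping", "foo": "true"})
always_merger.merge(settings, {"level": "token"})  # token settings merged last

assert settings == {
    "level": "token",  # the token value wins the conflicting key
    "resize-method": "display-update",
    "port": "1324",
    "foo": "true",
}
```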
| @ -90,23 +90,6 @@ class TestModels(TransactionTestCase): | |||||||
|                 "resize-method": "display-update", |                 "resize-method": "display-update", | ||||||
|             }, |             }, | ||||||
|         ) |         ) | ||||||
|         # Set settings in token |  | ||||||
|         token.settings = { |  | ||||||
|             "level": "token", |  | ||||||
|         } |  | ||||||
|         token.save() |  | ||||||
|         self.assertEqual( |  | ||||||
|             token.get_settings(), |  | ||||||
|             { |  | ||||||
|                 "hostname": self.endpoint.host.split(":")[0], |  | ||||||
|                 "port": "1324", |  | ||||||
|                 "client-name": f"authentik - {self.user}", |  | ||||||
|                 "drive-path": path, |  | ||||||
|                 "create-drive-path": "true", |  | ||||||
|                 "level": "token", |  | ||||||
|                 "resize-method": "display-update", |  | ||||||
|             }, |  | ||||||
|         ) |  | ||||||
|         # Set settings in property mapping (provider) |         # Set settings in property mapping (provider) | ||||||
|         mapping = RACPropertyMapping.objects.create( |         mapping = RACPropertyMapping.objects.create( | ||||||
|             name=generate_id(), |             name=generate_id(), | ||||||
| @ -151,3 +134,22 @@ class TestModels(TransactionTestCase): | |||||||
|                 "resize-method": "display-update", |                 "resize-method": "display-update", | ||||||
|             }, |             }, | ||||||
|         ) |         ) | ||||||
|  |         # Set settings in token | ||||||
|  |         token.settings = { | ||||||
|  |             "level": "token", | ||||||
|  |         } | ||||||
|  |         token.save() | ||||||
|  |         self.assertEqual( | ||||||
|  |             token.get_settings(), | ||||||
|  |             { | ||||||
|  |                 "hostname": self.endpoint.host.split(":")[0], | ||||||
|  |                 "port": "1324", | ||||||
|  |                 "client-name": f"authentik - {self.user}", | ||||||
|  |                 "drive-path": path, | ||||||
|  |                 "create-drive-path": "true", | ||||||
|  |                 "foo": "true", | ||||||
|  |                 "bar": "6", | ||||||
|  |                 "resize-method": "display-update", | ||||||
|  |                 "level": "token", | ||||||
|  |             }, | ||||||
|  |         ) | ||||||
|  | |||||||
| @ -1,11 +1,12 @@ | |||||||
| """authentik radius provider app config""" | """authentik radius provider app config""" | ||||||
|  |  | ||||||
| from django.apps import AppConfig | from authentik.blueprints.apps import ManagedAppConfig | ||||||
|  |  | ||||||
|  |  | ||||||
| class AuthentikProviderRadiusConfig(AppConfig): | class AuthentikProviderRadiusConfig(ManagedAppConfig): | ||||||
|     """authentik radius provider app config""" |     """authentik radius provider app config""" | ||||||
|  |  | ||||||
|     name = "authentik.providers.radius" |     name = "authentik.providers.radius" | ||||||
|     label = "authentik_providers_radius" |     label = "authentik_providers_radius" | ||||||
|     verbose_name = "authentik Providers.Radius" |     verbose_name = "authentik Providers.Radius" | ||||||
|  |     default = True | ||||||
|  | |||||||
| @ -1,12 +1,13 @@ | |||||||
| """authentik SAML IdP app config""" | """authentik SAML IdP app config""" | ||||||
|  |  | ||||||
| from django.apps import AppConfig | from authentik.blueprints.apps import ManagedAppConfig | ||||||
|  |  | ||||||
|  |  | ||||||
| class AuthentikProviderSAMLConfig(AppConfig): | class AuthentikProviderSAMLConfig(ManagedAppConfig): | ||||||
|     """authentik SAML IdP app config""" |     """authentik SAML IdP app config""" | ||||||
|  |  | ||||||
|     name = "authentik.providers.saml" |     name = "authentik.providers.saml" | ||||||
|     label = "authentik_providers_saml" |     label = "authentik_providers_saml" | ||||||
|     verbose_name = "authentik Providers.SAML" |     verbose_name = "authentik Providers.SAML" | ||||||
|     mountpoint = "application/saml/" |     mountpoint = "application/saml/" | ||||||
|  |     default = True | ||||||
|  | |||||||
| @ -47,15 +47,16 @@ class SCIMGroupClient(SCIMClient[Group, SCIMProviderGroup, SCIMGroupSchema]): | |||||||
|  |  | ||||||
|     def to_schema(self, obj: Group, connection: SCIMProviderGroup) -> SCIMGroupSchema: |     def to_schema(self, obj: Group, connection: SCIMProviderGroup) -> SCIMGroupSchema: | ||||||
|         """Convert authentik user into SCIM""" |         """Convert authentik user into SCIM""" | ||||||
|         raw_scim_group = super().to_schema( |         raw_scim_group = super().to_schema(obj, connection) | ||||||
|             obj, |  | ||||||
|             connection, |  | ||||||
|             schemas=(SCIM_GROUP_SCHEMA,), |  | ||||||
|         ) |  | ||||||
|         try: |         try: | ||||||
|             scim_group = SCIMGroupSchema.model_validate(delete_none_values(raw_scim_group)) |             scim_group = SCIMGroupSchema.model_validate(delete_none_values(raw_scim_group)) | ||||||
|         except ValidationError as exc: |         except ValidationError as exc: | ||||||
|             raise StopSync(exc, obj) from exc |             raise StopSync(exc, obj) from exc | ||||||
|  |         if SCIM_GROUP_SCHEMA not in scim_group.schemas: | ||||||
|  |             scim_group.schemas.insert(0, SCIM_GROUP_SCHEMA) | ||||||
|  |         # As this might be unset, we need to tell pydantic it's set to ensure the schemas | ||||||
|  |         # are included, even if it's just the defaults | ||||||
|  |         scim_group.schemas = list(scim_group.schemas) | ||||||
|         if not scim_group.externalId: |         if not scim_group.externalId: | ||||||
|             scim_group.externalId = str(obj.pk) |             scim_group.externalId = str(obj.pk) | ||||||
|  |  | ||||||
|  | |||||||
| @ -31,15 +31,16 @@ class SCIMUserClient(SCIMClient[User, SCIMProviderUser, SCIMUserSchema]): | |||||||
|  |  | ||||||
|     def to_schema(self, obj: User, connection: SCIMProviderUser) -> SCIMUserSchema: |     def to_schema(self, obj: User, connection: SCIMProviderUser) -> SCIMUserSchema: | ||||||
|         """Convert authentik user into SCIM""" |         """Convert authentik user into SCIM""" | ||||||
|         raw_scim_user = super().to_schema( |         raw_scim_user = super().to_schema(obj, connection) | ||||||
|             obj, |  | ||||||
|             connection, |  | ||||||
|             schemas=(SCIM_USER_SCHEMA,), |  | ||||||
|         ) |  | ||||||
|         try: |         try: | ||||||
|             scim_user = SCIMUserSchema.model_validate(delete_none_values(raw_scim_user)) |             scim_user = SCIMUserSchema.model_validate(delete_none_values(raw_scim_user)) | ||||||
|         except ValidationError as exc: |         except ValidationError as exc: | ||||||
|             raise StopSync(exc, obj) from exc |             raise StopSync(exc, obj) from exc | ||||||
|  |         if SCIM_USER_SCHEMA not in scim_user.schemas: | ||||||
|  |             scim_user.schemas.insert(0, SCIM_USER_SCHEMA) | ||||||
|  |         # As this might be unset, we need to tell pydantic it's set to ensure the schemas | ||||||
|  |         # are included, even if it's just the defaults | ||||||
|  |         scim_user.schemas = list(scim_user.schemas) | ||||||
|         if not scim_user.externalId: |         if not scim_user.externalId: | ||||||
|             scim_user.externalId = str(obj.uid) |             scim_user.externalId = str(obj.uid) | ||||||
|         return scim_user |         return scim_user | ||||||
|  | |||||||
| @ -91,6 +91,57 @@ class SCIMUserTests(TestCase): | |||||||
|             }, |             }, | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|  |     @Mocker() | ||||||
|  |     def test_user_create_custom_schema(self, mock: Mocker): | ||||||
|  |         """Test user creation with custom schema""" | ||||||
|  |         schema = SCIMMapping.objects.create( | ||||||
|  |             name="custom_schema", | ||||||
|  |             expression="""return {"schemas": ["foo"]}""", | ||||||
|  |         ) | ||||||
|  |         self.provider.property_mappings.add(schema) | ||||||
|  |         scim_id = generate_id() | ||||||
|  |         mock.get( | ||||||
|  |             "https://localhost/ServiceProviderConfig", | ||||||
|  |             json={}, | ||||||
|  |         ) | ||||||
|  |         mock.post( | ||||||
|  |             "https://localhost/Users", | ||||||
|  |             json={ | ||||||
|  |                 "id": scim_id, | ||||||
|  |             }, | ||||||
|  |         ) | ||||||
|  |         uid = generate_id() | ||||||
|  |         user = User.objects.create( | ||||||
|  |             username=uid, | ||||||
|  |             name=f"{uid} {uid}", | ||||||
|  |             email=f"{uid}@goauthentik.io", | ||||||
|  |         ) | ||||||
|  |         self.assertEqual(mock.call_count, 2) | ||||||
|  |         self.assertEqual(mock.request_history[0].method, "GET") | ||||||
|  |         self.assertEqual(mock.request_history[1].method, "POST") | ||||||
|  |         self.assertJSONEqual( | ||||||
|  |             mock.request_history[1].body, | ||||||
|  |             { | ||||||
|  |                 "schemas": ["urn:ietf:params:scim:schemas:core:2.0:User", "foo"], | ||||||
|  |                 "active": True, | ||||||
|  |                 "emails": [ | ||||||
|  |                     { | ||||||
|  |                         "primary": True, | ||||||
|  |                         "type": "other", | ||||||
|  |                         "value": f"{uid}@goauthentik.io", | ||||||
|  |                     } | ||||||
|  |                 ], | ||||||
|  |                 "externalId": user.uid, | ||||||
|  |                 "name": { | ||||||
|  |                     "familyName": uid, | ||||||
|  |                     "formatted": f"{uid} {uid}", | ||||||
|  |                     "givenName": uid, | ||||||
|  |                 }, | ||||||
|  |                 "displayName": f"{uid} {uid}", | ||||||
|  |                 "userName": uid, | ||||||
|  |             }, | ||||||
|  |         ) | ||||||
|  |  | ||||||
|     @Mocker() |     @Mocker() | ||||||
|     def test_user_create_different_provider_same_id(self, mock: Mocker): |     def test_user_create_different_provider_same_id(self, mock: Mocker): | ||||||
|         """Test user creation with multiple providers that happen |         """Test user creation with multiple providers that happen | ||||||
| @ -384,7 +435,7 @@ class SCIMUserTests(TestCase): | |||||||
|                 self.assertIn(request.method, SAFE_METHODS) |                 self.assertIn(request.method, SAFE_METHODS) | ||||||
|         task = SystemTask.objects.filter(uid=slugify(self.provider.name)).first() |         task = SystemTask.objects.filter(uid=slugify(self.provider.name)).first() | ||||||
|         self.assertIsNotNone(task) |         self.assertIsNotNone(task) | ||||||
|         drop_msg = task.messages[2] |         drop_msg = task.messages[3] | ||||||
|         self.assertEqual(drop_msg["event"], "Dropping mutating request due to dry run") |         self.assertEqual(drop_msg["event"], "Dropping mutating request due to dry run") | ||||||
|         self.assertIsNotNone(drop_msg["attributes"]["url"]) |         self.assertIsNotNone(drop_msg["attributes"]["url"]) | ||||||
|         self.assertIsNotNone(drop_msg["attributes"]["body"]) |         self.assertIsNotNone(drop_msg["attributes"]["body"]) | ||||||
|  | |||||||
| @ -1,12 +1,13 @@ | |||||||
| """authentik Recovery app config""" | """authentik Recovery app config""" | ||||||
|  |  | ||||||
| from django.apps import AppConfig | from authentik.blueprints.apps import ManagedAppConfig | ||||||
|  |  | ||||||
|  |  | ||||||
| class AuthentikRecoveryConfig(AppConfig): | class AuthentikRecoveryConfig(ManagedAppConfig): | ||||||
|     """authentik Recovery app config""" |     """authentik Recovery app config""" | ||||||
|  |  | ||||||
|     name = "authentik.recovery" |     name = "authentik.recovery" | ||||||
|     label = "authentik_recovery" |     label = "authentik_recovery" | ||||||
|     verbose_name = "authentik Recovery" |     verbose_name = "authentik Recovery" | ||||||
|     mountpoint = "recovery/" |     mountpoint = "recovery/" | ||||||
|  |     default = True | ||||||
|  | |||||||
| @ -26,7 +26,7 @@ from structlog.contextvars import STRUCTLOG_KEY_PREFIX | |||||||
| from structlog.stdlib import get_logger | from structlog.stdlib import get_logger | ||||||
| from tenant_schemas_celery.app import CeleryApp as TenantAwareCeleryApp | from tenant_schemas_celery.app import CeleryApp as TenantAwareCeleryApp | ||||||
|  |  | ||||||
| from authentik import get_full_version | from authentik import authentik_full_version | ||||||
| from authentik.lib.sentry import before_send | from authentik.lib.sentry import before_send | ||||||
| from authentik.lib.utils.errors import exception_to_string | from authentik.lib.utils.errors import exception_to_string | ||||||
|  |  | ||||||
| @ -98,13 +98,7 @@ def _get_startup_tasks_default_tenant() -> list[Callable]: | |||||||
|  |  | ||||||
| def _get_startup_tasks_all_tenants() -> list[Callable]: | def _get_startup_tasks_all_tenants() -> list[Callable]: | ||||||
|     """Get all tasks to be run on startup for all tenants""" |     """Get all tasks to be run on startup for all tenants""" | ||||||
|     from authentik.admin.tasks import clear_update_notifications |     return [] | ||||||
|     from authentik.providers.proxy.tasks import proxy_set_defaults |  | ||||||
|  |  | ||||||
|     return [ |  | ||||||
|         clear_update_notifications, |  | ||||||
|         proxy_set_defaults, |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|  |  | ||||||
| @worker_ready.connect | @worker_ready.connect | ||||||
| @ -164,7 +158,7 @@ class LivenessProbe(bootsteps.StartStopStep): | |||||||
| @inspect_command(default_timeout=0.2) | @inspect_command(default_timeout=0.2) | ||||||
| def ping(state, **kwargs): | def ping(state, **kwargs): | ||||||
|     """Ping worker(s).""" |     """Ping worker(s).""" | ||||||
|     return {"ok": "pong", "version": get_full_version()} |     return {"ok": "pong", "version": authentik_full_version()} | ||||||
|  |  | ||||||
|  |  | ||||||
| CELERY_APP.config_from_object(settings.CELERY) | CELERY_APP.config_from_object(settings.CELERY) | ||||||
|  | |||||||
| @ -10,7 +10,7 @@ from celery.schedules import crontab | |||||||
| from sentry_sdk import set_tag | from sentry_sdk import set_tag | ||||||
| from xmlsec import enable_debug_trace | from xmlsec import enable_debug_trace | ||||||
|  |  | ||||||
| from authentik import __version__ | from authentik import authentik_version | ||||||
| from authentik.lib.config import CONFIG, django_db_config, redis_url | from authentik.lib.config import CONFIG, django_db_config, redis_url | ||||||
| from authentik.lib.logging import get_logger_config, structlog_configure | from authentik.lib.logging import get_logger_config, structlog_configure | ||||||
| from authentik.lib.sentry import sentry_init | from authentik.lib.sentry import sentry_init | ||||||
| @ -137,7 +137,7 @@ GUARDIAN_MONKEY_PATCH_USER = False | |||||||
| SPECTACULAR_SETTINGS = { | SPECTACULAR_SETTINGS = { | ||||||
|     "TITLE": "authentik", |     "TITLE": "authentik", | ||||||
|     "DESCRIPTION": "Making authentication simple.", |     "DESCRIPTION": "Making authentication simple.", | ||||||
|     "VERSION": __version__, |     "VERSION": authentik_version(), | ||||||
|     "COMPONENT_SPLIT_REQUEST": True, |     "COMPONENT_SPLIT_REQUEST": True, | ||||||
|     "SCHEMA_PATH_PREFIX": "/api/v([0-9]+(beta)?)", |     "SCHEMA_PATH_PREFIX": "/api/v([0-9]+(beta)?)", | ||||||
|     "SCHEMA_PATH_PREFIX_TRIM": True, |     "SCHEMA_PATH_PREFIX_TRIM": True, | ||||||
| @ -424,7 +424,7 @@ else: | |||||||
|         "BACKEND": "authentik.root.storages.FileStorage", |         "BACKEND": "authentik.root.storages.FileStorage", | ||||||
|         "OPTIONS": { |         "OPTIONS": { | ||||||
|             "location": Path(CONFIG.get("storage.media.file.path")), |             "location": Path(CONFIG.get("storage.media.file.path")), | ||||||
|             "base_url": "/media/", |             "base_url": CONFIG.get("web.path", "/") + "media/", | ||||||
|         }, |         }, | ||||||
|     } |     } | ||||||
|     # Compatibility for apps not supporting top-level STORAGES |     # Compatibility for apps not supporting top-level STORAGES | ||||||
| @ -486,7 +486,7 @@ if DEBUG: | |||||||
|  |  | ||||||
| TENANT_APPS.append("authentik.core") | TENANT_APPS.append("authentik.core") | ||||||
|  |  | ||||||
| CONFIG.log("info", "Booting authentik", version=__version__) | CONFIG.log("info", "Booting authentik", version=authentik_version()) | ||||||
|  |  | ||||||
| # Attempt to load enterprise app, if available | # Attempt to load enterprise app, if available | ||||||
| try: | try: | ||||||
|  | |||||||
| @ -5,7 +5,7 @@ from ssl import OPENSSL_VERSION | |||||||
| import pytest | import pytest | ||||||
| from cryptography.hazmat.backends.openssl.backend import backend | from cryptography.hazmat.backends.openssl.backend import backend | ||||||
|  |  | ||||||
| from authentik import get_full_version | from authentik import authentik_full_version | ||||||
|  |  | ||||||
| IS_CI = "CI" in environ | IS_CI = "CI" in environ | ||||||
|  |  | ||||||
| @ -22,7 +22,7 @@ def pytest_sessionstart(*_, **__): | |||||||
| def pytest_report_header(*_, **__): | def pytest_report_header(*_, **__): | ||||||
|     """Add authentik version to pytest output""" |     """Add authentik version to pytest output""" | ||||||
|     return [ |     return [ | ||||||
|         f"authentik version: {get_full_version()}", |         f"authentik version: {authentik_full_version()}", | ||||||
|         f"OpenSSL version: {OPENSSL_VERSION}, FIPS: {backend._fips_enabled}", |         f"OpenSSL version: {OPENSSL_VERSION}, FIPS: {backend._fips_enabled}", | ||||||
|     ] |     ] | ||||||
|  |  | ||||||
|  | |||||||
| @ -31,6 +31,8 @@ class PytestTestRunner(DiscoverRunner):  # pragma: no cover | |||||||
|  |  | ||||||
|         if kwargs.get("randomly_seed", None): |         if kwargs.get("randomly_seed", None): | ||||||
|             self.args.append(f"--randomly-seed={kwargs['randomly_seed']}") |             self.args.append(f"--randomly-seed={kwargs['randomly_seed']}") | ||||||
|  |         if kwargs.get("no_capture", False): | ||||||
|  |             self.args.append("--capture=no") | ||||||
|  |  | ||||||
|         settings.TEST = True |         settings.TEST = True | ||||||
|         settings.CELERY["task_always_eager"] = True |         settings.CELERY["task_always_eager"] = True | ||||||
| @ -64,6 +66,11 @@ class PytestTestRunner(DiscoverRunner):  # pragma: no cover | |||||||
|             "Default behaviour: use random.Random().getrandbits(32), so the seed is" |             "Default behaviour: use random.Random().getrandbits(32), so the seed is" | ||||||
|             "different on each run.", |             "different on each run.", | ||||||
|         ) |         ) | ||||||
|  |         parser.add_argument( | ||||||
|  |             "--no-capture", | ||||||
|  |             action="store_true", | ||||||
|  |             help="Disable any capturing of stdout/stderr during tests.", | ||||||
|  |         ) | ||||||
|  |  | ||||||
|     def run_tests(self, test_labels, extra_tests=None, **kwargs): |     def run_tests(self, test_labels, extra_tests=None, **kwargs): | ||||||
|         """Run pytest and return the exitcode. |         """Run pytest and return the exitcode. | ||||||
|  | |||||||
| @ -103,6 +103,7 @@ class LDAPSourceSerializer(SourceSerializer): | |||||||
|             "user_object_filter", |             "user_object_filter", | ||||||
|             "group_object_filter", |             "group_object_filter", | ||||||
|             "group_membership_field", |             "group_membership_field", | ||||||
|  |             "user_membership_attribute", | ||||||
|             "object_uniqueness_field", |             "object_uniqueness_field", | ||||||
|             "password_login_update_internal_password", |             "password_login_update_internal_password", | ||||||
|             "sync_users", |             "sync_users", | ||||||
| @ -111,6 +112,7 @@ class LDAPSourceSerializer(SourceSerializer): | |||||||
|             "sync_parent_group", |             "sync_parent_group", | ||||||
|             "connectivity", |             "connectivity", | ||||||
|             "lookup_groups_from_user", |             "lookup_groups_from_user", | ||||||
|  |             "delete_not_found_objects", | ||||||
|         ] |         ] | ||||||
|         extra_kwargs = {"bind_password": {"write_only": True}} |         extra_kwargs = {"bind_password": {"write_only": True}} | ||||||
|  |  | ||||||
| @ -138,6 +140,7 @@ class LDAPSourceViewSet(UsedByMixin, ModelViewSet): | |||||||
|         "user_object_filter", |         "user_object_filter", | ||||||
|         "group_object_filter", |         "group_object_filter", | ||||||
|         "group_membership_field", |         "group_membership_field", | ||||||
|  |         "user_membership_attribute", | ||||||
|         "object_uniqueness_field", |         "object_uniqueness_field", | ||||||
|         "password_login_update_internal_password", |         "password_login_update_internal_password", | ||||||
|         "sync_users", |         "sync_users", | ||||||
| @ -147,6 +150,7 @@ class LDAPSourceViewSet(UsedByMixin, ModelViewSet): | |||||||
|         "user_property_mappings", |         "user_property_mappings", | ||||||
|         "group_property_mappings", |         "group_property_mappings", | ||||||
|         "lookup_groups_from_user", |         "lookup_groups_from_user", | ||||||
|  |         "delete_not_found_objects", | ||||||
|     ] |     ] | ||||||
|     search_fields = ["name", "slug"] |     search_fields = ["name", "slug"] | ||||||
|     ordering = ["name"] |     ordering = ["name"] | ||||||
|  | |||||||
| @ -0,0 +1,48 @@ | |||||||
|  | # Generated by Django 5.1.9 on 2025-05-28 08:15 | ||||||
|  |  | ||||||
|  | from django.db import migrations, models | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class Migration(migrations.Migration): | ||||||
|  |  | ||||||
|  |     dependencies = [ | ||||||
|  |         ("authentik_core", "0048_delete_oldauthenticatedsession_content_type"), | ||||||
|  |         ("authentik_sources_ldap", "0008_groupldapsourceconnection_userldapsourceconnection"), | ||||||
|  |     ] | ||||||
|  |  | ||||||
|  |     operations = [ | ||||||
|  |         migrations.AddField( | ||||||
|  |             model_name="groupldapsourceconnection", | ||||||
|  |             name="validated_by", | ||||||
|  |             field=models.UUIDField( | ||||||
|  |                 blank=True, | ||||||
|  |                 help_text="Unique ID used while checking if this object still exists in the directory.", | ||||||
|  |                 null=True, | ||||||
|  |             ), | ||||||
|  |         ), | ||||||
|  |         migrations.AddField( | ||||||
|  |             model_name="ldapsource", | ||||||
|  |             name="delete_not_found_objects", | ||||||
|  |             field=models.BooleanField( | ||||||
|  |                 default=False, | ||||||
|  |                 help_text="Delete authentik users and groups which were previously supplied by this source, but are now missing from it.", | ||||||
|  |             ), | ||||||
|  |         ), | ||||||
|  |         migrations.AddField( | ||||||
|  |             model_name="userldapsourceconnection", | ||||||
|  |             name="validated_by", | ||||||
|  |             field=models.UUIDField( | ||||||
|  |                 blank=True, | ||||||
|  |                 help_text="Unique ID used while checking if this object still exists in the directory.", | ||||||
|  |                 null=True, | ||||||
|  |             ), | ||||||
|  |         ), | ||||||
|  |         migrations.AddIndex( | ||||||
|  |             model_name="groupldapsourceconnection", | ||||||
|  |             index=models.Index(fields=["validated_by"], name="authentik_s_validat_b70447_idx"), | ||||||
|  |         ), | ||||||
|  |         migrations.AddIndex( | ||||||
|  |             model_name="userldapsourceconnection", | ||||||
|  |             index=models.Index(fields=["validated_by"], name="authentik_s_validat_ff2ebc_idx"), | ||||||
|  |         ), | ||||||
|  |     ] | ||||||
| @ -0,0 +1,32 @@ | |||||||
|  | # Generated by Django 5.1.9 on 2025-05-29 11:22 | ||||||
|  |  | ||||||
|  | from django.apps.registry import Apps | ||||||
|  | from django.db import migrations, models | ||||||
|  | from django.db.backends.base.schema import BaseDatabaseSchemaEditor | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def set_user_membership_attribute(apps: Apps, schema_editor: BaseDatabaseSchemaEditor): | ||||||
|  |     LDAPSource = apps.get_model("authentik_sources_ldap", "LDAPSource") | ||||||
|  |     db_alias = schema_editor.connection.alias | ||||||
|  |  | ||||||
|  |     LDAPSource.objects.using(db_alias).filter(group_membership_field="memberUid").all().update( | ||||||
|  |         user_membership_attribute="ldap_uniq" | ||||||
|  |     ) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class Migration(migrations.Migration): | ||||||
|  |     dependencies = [ | ||||||
|  |         ("authentik_sources_ldap", "0009_groupldapsourceconnection_validated_by_and_more"), | ||||||
|  |     ] | ||||||
|  |  | ||||||
|  |     operations = [ | ||||||
|  |         migrations.AddField( | ||||||
|  |             model_name="ldapsource", | ||||||
|  |             name="user_membership_attribute", | ||||||
|  |             field=models.TextField( | ||||||
|  |                 default="distinguishedName", | ||||||
|  |                 help_text="Attribute which matches the value of `group_membership_field`.", | ||||||
|  |             ), | ||||||
|  |         ), | ||||||
|  |         migrations.RunPython(set_user_membership_attribute, migrations.RunPython.noop), | ||||||
|  |     ] | ||||||
| @ -100,6 +100,10 @@ class LDAPSource(Source): | |||||||
|         default="(objectClass=person)", |         default="(objectClass=person)", | ||||||
|         help_text=_("Consider Objects matching this filter to be Users."), |         help_text=_("Consider Objects matching this filter to be Users."), | ||||||
|     ) |     ) | ||||||
|  |     user_membership_attribute = models.TextField( | ||||||
|  |         default=LDAP_DISTINGUISHED_NAME, | ||||||
|  |         help_text=_("Attribute which matches the value of `group_membership_field`."), | ||||||
|  |     ) | ||||||
|     group_membership_field = models.TextField( |     group_membership_field = models.TextField( | ||||||
|         default="member", help_text=_("Field which contains members of a group.") |         default="member", help_text=_("Field which contains members of a group.") | ||||||
|     ) |     ) | ||||||
| @ -137,6 +141,14 @@ class LDAPSource(Source): | |||||||
|         ), |         ), | ||||||
|     ) |     ) | ||||||
|  |  | ||||||
|  |     delete_not_found_objects = models.BooleanField( | ||||||
|  |         default=False, | ||||||
|  |         help_text=_( | ||||||
|  |             "Delete authentik users and groups which were previously supplied by this source, " | ||||||
|  |             "but are now missing from it." | ||||||
|  |         ), | ||||||
|  |     ) | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def component(self) -> str: |     def component(self) -> str: | ||||||
|         return "ak-source-ldap-form" |         return "ak-source-ldap-form" | ||||||
| @ -321,6 +333,12 @@ class LDAPSourcePropertyMapping(PropertyMapping): | |||||||
|  |  | ||||||
|  |  | ||||||
| class UserLDAPSourceConnection(UserSourceConnection): | class UserLDAPSourceConnection(UserSourceConnection): | ||||||
|  |     validated_by = models.UUIDField( | ||||||
|  |         null=True, | ||||||
|  |         blank=True, | ||||||
|  |         help_text=_("Unique ID used while checking if this object still exists in the directory."), | ||||||
|  |     ) | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def serializer(self) -> type[Serializer]: |     def serializer(self) -> type[Serializer]: | ||||||
|         from authentik.sources.ldap.api import ( |         from authentik.sources.ldap.api import ( | ||||||
| @ -332,9 +350,18 @@ class UserLDAPSourceConnection(UserSourceConnection): | |||||||
|     class Meta: |     class Meta: | ||||||
|         verbose_name = _("User LDAP Source Connection") |         verbose_name = _("User LDAP Source Connection") | ||||||
|         verbose_name_plural = _("User LDAP Source Connections") |         verbose_name_plural = _("User LDAP Source Connections") | ||||||
|  |         indexes = [ | ||||||
|  |             models.Index(fields=["validated_by"]), | ||||||
|  |         ] | ||||||
|  |  | ||||||
|  |  | ||||||
| class GroupLDAPSourceConnection(GroupSourceConnection): | class GroupLDAPSourceConnection(GroupSourceConnection): | ||||||
|  |     validated_by = models.UUIDField( | ||||||
|  |         null=True, | ||||||
|  |         blank=True, | ||||||
|  |         help_text=_("Unique ID used while checking if this object still exists in the directory."), | ||||||
|  |     ) | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def serializer(self) -> type[Serializer]: |     def serializer(self) -> type[Serializer]: | ||||||
|         from authentik.sources.ldap.api import ( |         from authentik.sources.ldap.api import ( | ||||||
| @ -346,3 +373,6 @@ class GroupLDAPSourceConnection(GroupSourceConnection): | |||||||
|     class Meta: |     class Meta: | ||||||
|         verbose_name = _("Group LDAP Source Connection") |         verbose_name = _("Group LDAP Source Connection") | ||||||
|         verbose_name_plural = _("Group LDAP Source Connections") |         verbose_name_plural = _("Group LDAP Source Connections") | ||||||
|  |         indexes = [ | ||||||
|  |             models.Index(fields=["validated_by"]), | ||||||
|  |         ] | ||||||
|  | |||||||
| @ -9,7 +9,7 @@ from structlog.stdlib import BoundLogger, get_logger | |||||||
| from authentik.core.sources.mapper import SourceMapper | from authentik.core.sources.mapper import SourceMapper | ||||||
| from authentik.lib.config import CONFIG | from authentik.lib.config import CONFIG | ||||||
| from authentik.lib.sync.mapper import PropertyMappingManager | from authentik.lib.sync.mapper import PropertyMappingManager | ||||||
| from authentik.sources.ldap.models import LDAPSource | from authentik.sources.ldap.models import LDAPSource, flatten | ||||||
|  |  | ||||||
|  |  | ||||||
| class BaseLDAPSynchronizer: | class BaseLDAPSynchronizer: | ||||||
| @ -77,6 +77,16 @@ class BaseLDAPSynchronizer: | |||||||
|         """Get objects from LDAP, implemented in subclass""" |         """Get objects from LDAP, implemented in subclass""" | ||||||
|         raise NotImplementedError() |         raise NotImplementedError() | ||||||
|  |  | ||||||
|  |     def get_attributes(self, object): | ||||||
|  |         """Get the raw LDAP attributes of a paged_search result, or None if it has none""" | ||||||
|  |         if "attributes" not in object: | ||||||
|  |             return | ||||||
|  |         return object.get("attributes", {}) | ||||||
|  |  | ||||||
|  |     def get_identifier(self, attributes: dict): | ||||||
|  |         """Get the flattened uniqueness-field value used as the source connection identifier""" | ||||||
|  |         if not attributes.get(self._source.object_uniqueness_field): | ||||||
|  |             return | ||||||
|  |         return flatten(attributes[self._source.object_uniqueness_field]) | ||||||
|  |  | ||||||
|     def search_paginator(  # noqa: PLR0913 |     def search_paginator(  # noqa: PLR0913 | ||||||
|         self, |         self, | ||||||
|         search_base, |         search_base, | ||||||
|  | |||||||
authentik/sources/ldap/sync/forward_delete_groups.py (new file, 61 lines)
							| @ -0,0 +1,61 @@ | |||||||
|  | from collections.abc import Generator | ||||||
|  | from itertools import batched | ||||||
|  | from uuid import uuid4 | ||||||
|  |  | ||||||
|  | from ldap3 import SUBTREE | ||||||
|  |  | ||||||
|  | from authentik.core.models import Group | ||||||
|  | from authentik.sources.ldap.models import GroupLDAPSourceConnection | ||||||
|  | from authentik.sources.ldap.sync.base import BaseLDAPSynchronizer | ||||||
|  | from authentik.sources.ldap.sync.forward_delete_users import DELETE_CHUNK_SIZE, UPDATE_CHUNK_SIZE | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class GroupLDAPForwardDeletion(BaseLDAPSynchronizer): | ||||||
|  |     """Delete LDAP Groups from authentik""" | ||||||
|  |  | ||||||
|  |     @staticmethod | ||||||
|  |     def name() -> str: | ||||||
|  |         return "group_deletions" | ||||||
|  |  | ||||||
|  |     def get_objects(self, **kwargs) -> Generator: | ||||||
|  |         if not self._source.sync_groups or not self._source.delete_not_found_objects: | ||||||
|  |             self.message("Group syncing is disabled for this Source") | ||||||
|  |             return iter(()) | ||||||
|  |  | ||||||
|  |         uuid = uuid4() | ||||||
|  |         groups = self._source.connection().extend.standard.paged_search( | ||||||
|  |             search_base=self.base_dn_groups, | ||||||
|  |             search_filter=self._source.group_object_filter, | ||||||
|  |             search_scope=SUBTREE, | ||||||
|  |             attributes=[self._source.object_uniqueness_field], | ||||||
|  |             generator=True, | ||||||
|  |             **kwargs, | ||||||
|  |         ) | ||||||
|  |         for batch in batched(groups, UPDATE_CHUNK_SIZE, strict=False): | ||||||
|  |             identifiers = [] | ||||||
|  |             for group in batch: | ||||||
|  |                 if not (attributes := self.get_attributes(group)): | ||||||
|  |                     continue | ||||||
|  |                 if identifier := self.get_identifier(attributes): | ||||||
|  |                     identifiers.append(identifier) | ||||||
|  |             GroupLDAPSourceConnection.objects.filter(identifier__in=identifiers).update( | ||||||
|  |                 validated_by=uuid | ||||||
|  |             ) | ||||||
|  |  | ||||||
|  |         return batched( | ||||||
|  |             GroupLDAPSourceConnection.objects.filter(source=self._source) | ||||||
|  |             .exclude(validated_by=uuid) | ||||||
|  |             .values_list("group", flat=True) | ||||||
|  |             .iterator(chunk_size=DELETE_CHUNK_SIZE), | ||||||
|  |             DELETE_CHUNK_SIZE, | ||||||
|  |             strict=False, | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |     def sync(self, group_pks: tuple) -> int: | ||||||
|  |         """Delete authentik groups""" | ||||||
|  |         if not self._source.sync_groups or not self._source.delete_not_found_objects: | ||||||
|  |             self.message("Group syncing is disabled for this Source") | ||||||
|  |             return -1 | ||||||
|  |         self._logger.debug("Deleting groups", group_pks=group_pks) | ||||||
|  |         _, deleted_per_type = Group.objects.filter(pk__in=group_pks).delete() | ||||||
|  |         return deleted_per_type.get(Group._meta.label, 0) | ||||||
authentik/sources/ldap/sync/forward_delete_users.py (new file, 63 lines)
							| @ -0,0 +1,63 @@ | |||||||
|  | from collections.abc import Generator | ||||||
|  | from itertools import batched | ||||||
|  | from uuid import uuid4 | ||||||
|  |  | ||||||
|  | from ldap3 import SUBTREE | ||||||
|  |  | ||||||
|  | from authentik.core.models import User | ||||||
|  | from authentik.sources.ldap.models import UserLDAPSourceConnection | ||||||
|  | from authentik.sources.ldap.sync.base import BaseLDAPSynchronizer | ||||||
|  |  | ||||||
|  | UPDATE_CHUNK_SIZE = 10_000 | ||||||
|  | DELETE_CHUNK_SIZE = 50 | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class UserLDAPForwardDeletion(BaseLDAPSynchronizer): | ||||||
|  |     """Delete LDAP Users from authentik""" | ||||||
|  |  | ||||||
|  |     @staticmethod | ||||||
|  |     def name() -> str: | ||||||
|  |         return "user_deletions" | ||||||
|  |  | ||||||
|  |     def get_objects(self, **kwargs) -> Generator: | ||||||
|  |         if not self._source.sync_users or not self._source.delete_not_found_objects: | ||||||
|  |             self.message("User syncing is disabled for this Source") | ||||||
|  |             return iter(()) | ||||||
|  |  | ||||||
|  |         uuid = uuid4() | ||||||
|  |         users = self._source.connection().extend.standard.paged_search( | ||||||
|  |             search_base=self.base_dn_users, | ||||||
|  |             search_filter=self._source.user_object_filter, | ||||||
|  |             search_scope=SUBTREE, | ||||||
|  |             attributes=[self._source.object_uniqueness_field], | ||||||
|  |             generator=True, | ||||||
|  |             **kwargs, | ||||||
|  |         ) | ||||||
|  |         for batch in batched(users, UPDATE_CHUNK_SIZE, strict=False): | ||||||
|  |             identifiers = [] | ||||||
|  |             for user in batch: | ||||||
|  |                 if not (attributes := self.get_attributes(user)): | ||||||
|  |                     continue | ||||||
|  |                 if identifier := self.get_identifier(attributes): | ||||||
|  |                     identifiers.append(identifier) | ||||||
|  |             UserLDAPSourceConnection.objects.filter(identifier__in=identifiers).update( | ||||||
|  |                 validated_by=uuid | ||||||
|  |             ) | ||||||
|  |  | ||||||
|  |         return batched( | ||||||
|  |             UserLDAPSourceConnection.objects.filter(source=self._source) | ||||||
|  |             .exclude(validated_by=uuid) | ||||||
|  |             .values_list("user", flat=True) | ||||||
|  |             .iterator(chunk_size=DELETE_CHUNK_SIZE), | ||||||
|  |             DELETE_CHUNK_SIZE, | ||||||
|  |             strict=False, | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |     def sync(self, user_pks: tuple) -> int: | ||||||
|  |         """Delete authentik users""" | ||||||
|  |         if not self._source.sync_users or not self._source.delete_not_found_objects: | ||||||
|  |             self.message("User syncing is disabled for this Source") | ||||||
|  |             return -1 | ||||||
|  |         self._logger.debug("Deleting users", user_pks=user_pks) | ||||||
|  |         _, deleted_per_type = User.objects.filter(pk__in=user_pks).delete() | ||||||
|  |         return deleted_per_type.get(User._meta.label, 0) | ||||||
| @ -58,18 +58,16 @@ class GroupLDAPSynchronizer(BaseLDAPSynchronizer): | |||||||
|             return -1 |             return -1 | ||||||
|         group_count = 0 |         group_count = 0 | ||||||
|         for group in page_data: |         for group in page_data: | ||||||
|             if "attributes" not in group: |             if (attributes := self.get_attributes(group)) is None: | ||||||
|                 continue |                 continue | ||||||
|             attributes = group.get("attributes", {}) |  | ||||||
|             group_dn = flatten(flatten(group.get("entryDN", group.get("dn")))) |             group_dn = flatten(flatten(group.get("entryDN", group.get("dn")))) | ||||||
|             if not attributes.get(self._source.object_uniqueness_field): |             if not (uniq := self.get_identifier(attributes)): | ||||||
|                 self.message( |                 self.message( | ||||||
|                     f"Uniqueness field not found/not set in attributes: '{group_dn}'", |                     f"Uniqueness field not found/not set in attributes: '{group_dn}'", | ||||||
|                     attributes=attributes.keys(), |                     attributes=attributes.keys(), | ||||||
|                     dn=group_dn, |                     dn=group_dn, | ||||||
|                 ) |                 ) | ||||||
|                 continue |                 continue | ||||||
|             uniq = flatten(attributes[self._source.object_uniqueness_field]) |  | ||||||
|             try: |             try: | ||||||
|                 defaults = { |                 defaults = { | ||||||
|                     k: flatten(v) |                     k: flatten(v) | ||||||
|  | |||||||
| @ -63,25 +63,19 @@ class MembershipLDAPSynchronizer(BaseLDAPSynchronizer): | |||||||
|                     group_member_dn = group_member.get("dn", {}) |                     group_member_dn = group_member.get("dn", {}) | ||||||
|                     members.append(group_member_dn) |                     members.append(group_member_dn) | ||||||
|             else: |             else: | ||||||
|                 if "attributes" not in group: |                 if (attributes := self.get_attributes(group)) is None: | ||||||
|                     continue |                     continue | ||||||
|                 members = group.get("attributes", {}).get(self._source.group_membership_field, []) |                 members = attributes.get(self._source.group_membership_field, []) | ||||||
|  |  | ||||||
|             ak_group = self.get_group(group) |             ak_group = self.get_group(group) | ||||||
|             if not ak_group: |             if not ak_group: | ||||||
|                 continue |                 continue | ||||||
|  |  | ||||||
|             membership_mapping_attribute = LDAP_DISTINGUISHED_NAME |  | ||||||
|             if self._source.group_membership_field == "memberUid": |  | ||||||
|                 # If memberships are based on the posixGroup's 'memberUid' |  | ||||||
|                 # attribute we use the RDN instead of the FDN to lookup members. |  | ||||||
|                 membership_mapping_attribute = LDAP_UNIQUENESS |  | ||||||
|  |  | ||||||
|             users = User.objects.filter( |             users = User.objects.filter( | ||||||
|                 Q(**{f"attributes__{membership_mapping_attribute}__in": members}) |                 Q(**{f"attributes__{self._source.user_membership_attribute}__in": members}) | ||||||
|                 | Q( |                 | Q( | ||||||
|                     **{ |                     **{ | ||||||
|                         f"attributes__{membership_mapping_attribute}__isnull": True, |                         f"attributes__{self._source.user_membership_attribute}__isnull": True, | ||||||
|                         "ak_groups__in": [ak_group], |                         "ak_groups__in": [ak_group], | ||||||
|                     } |                     } | ||||||
|                 ) |                 ) | ||||||
|  | |||||||
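The hunk above replaces the hard-coded `memberUid` special case with the configurable `user_membership_attribute`, so membership lookups only work if the user's `attributes` actually contain that attribute. The tests further down in this diff arrange that with a property mapping; a minimal sketch of such an expression follows (it is evaluated inside authentik's property-mapping sandbox, which provides `ldap` and `list_flatten`; the attribute name `uid` mirrors the value used in the tests and is otherwise an assumption):

```python
# LDAP property-mapping expression, not a standalone script: authentik evaluates it
# with `ldap` (the raw LDAP attributes) and `list_flatten` in scope. It copies the
# LDAP `uid` onto the user's attributes so the membership sync can match it against
# a posixGroup's `memberUid` values when user_membership_attribute = "uid".
return {"attributes": {"uid": list_flatten(ldap.get("uid"))}}
```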
| @ -60,18 +60,16 @@ class UserLDAPSynchronizer(BaseLDAPSynchronizer): | |||||||
|             return -1 |             return -1 | ||||||
|         user_count = 0 |         user_count = 0 | ||||||
|         for user in page_data: |         for user in page_data: | ||||||
|             if "attributes" not in user: |             if (attributes := self.get_attributes(user)) is None: | ||||||
|                 continue |                 continue | ||||||
|             attributes = user.get("attributes", {}) |  | ||||||
|             user_dn = flatten(user.get("entryDN", user.get("dn"))) |             user_dn = flatten(user.get("entryDN", user.get("dn"))) | ||||||
|             if not attributes.get(self._source.object_uniqueness_field): |             if not (uniq := self.get_identifier(attributes)): | ||||||
|                 self.message( |                 self.message( | ||||||
|                     f"Uniqueness field not found/not set in attributes: '{user_dn}'", |                     f"Uniqueness field not found/not set in attributes: '{user_dn}'", | ||||||
|                     attributes=attributes.keys(), |                     attributes=attributes.keys(), | ||||||
|                     dn=user_dn, |                     dn=user_dn, | ||||||
|                 ) |                 ) | ||||||
|                 continue |                 continue | ||||||
|             uniq = flatten(attributes[self._source.object_uniqueness_field]) |  | ||||||
|             try: |             try: | ||||||
|                 defaults = { |                 defaults = { | ||||||
|                     k: flatten(v) |                     k: flatten(v) | ||||||
|  | |||||||
| @ -17,6 +17,8 @@ from authentik.lib.utils.reflection import class_to_path, path_to_class | |||||||
| from authentik.root.celery import CELERY_APP | from authentik.root.celery import CELERY_APP | ||||||
| from authentik.sources.ldap.models import LDAPSource | from authentik.sources.ldap.models import LDAPSource | ||||||
| from authentik.sources.ldap.sync.base import BaseLDAPSynchronizer | from authentik.sources.ldap.sync.base import BaseLDAPSynchronizer | ||||||
|  | from authentik.sources.ldap.sync.forward_delete_groups import GroupLDAPForwardDeletion | ||||||
|  | from authentik.sources.ldap.sync.forward_delete_users import UserLDAPForwardDeletion | ||||||
| from authentik.sources.ldap.sync.groups import GroupLDAPSynchronizer | from authentik.sources.ldap.sync.groups import GroupLDAPSynchronizer | ||||||
| from authentik.sources.ldap.sync.membership import MembershipLDAPSynchronizer | from authentik.sources.ldap.sync.membership import MembershipLDAPSynchronizer | ||||||
| from authentik.sources.ldap.sync.users import UserLDAPSynchronizer | from authentik.sources.ldap.sync.users import UserLDAPSynchronizer | ||||||
| @ -52,11 +54,11 @@ def ldap_connectivity_check(pk: str | None = None): | |||||||
|  |  | ||||||
|  |  | ||||||
| @CELERY_APP.task( | @CELERY_APP.task( | ||||||
|     # We take the configured hours timeout time by 2.5 as we run user and |     # We multiply the configured timeout hours by 3.5: user and group sync run | ||||||
|     # group in parallel and then membership, so 2x is to cover the serial tasks, |     # in parallel, then membership, then deletions, so 3x covers the serial tasks | ||||||
|     # and 0.5x on top of that to give some more leeway |     # and 0.5x on top of that gives some more leeway | ||||||
|     soft_time_limit=(60 * 60 * CONFIG.get_int("ldap.task_timeout_hours")) * 2.5, |     soft_time_limit=(60 * 60 * CONFIG.get_int("ldap.task_timeout_hours")) * 3.5, | ||||||
|     task_time_limit=(60 * 60 * CONFIG.get_int("ldap.task_timeout_hours")) * 2.5, |     task_time_limit=(60 * 60 * CONFIG.get_int("ldap.task_timeout_hours")) * 3.5, | ||||||
| ) | ) | ||||||
| def ldap_sync_single(source_pk: str): | def ldap_sync_single(source_pk: str): | ||||||
|     """Sync a single source""" |     """Sync a single source""" | ||||||
| @ -79,6 +81,25 @@ def ldap_sync_single(source_pk: str): | |||||||
|             group( |             group( | ||||||
|                 ldap_sync_paginator(source, MembershipLDAPSynchronizer), |                 ldap_sync_paginator(source, MembershipLDAPSynchronizer), | ||||||
|             ), |             ), | ||||||
|  |             # Finally, deletions. What we'd really like to do here is something like | ||||||
|  |             # ``` | ||||||
|  |             # user_identifiers = <ldap query> | ||||||
|  |             # User.objects.exclude( | ||||||
|  |             #     usersourceconnection__identifier__in=user_identifiers, | ||||||
|  |             # ).delete() | ||||||
|  |             # ``` | ||||||
|  |             # This runs into performance issues in large installations. So instead we spread the | ||||||
|  |             # work out into three steps: | ||||||
|  |             # 1. Get every object from the LDAP source. | ||||||
|  |             # 2. Mark every object as "safe" in the database. This is quick, but any error could | ||||||
|  |             #    mean deleting users which should not be deleted, so we do it immediately, in | ||||||
|  |             #    large chunks, and only queue the deletion step afterwards. | ||||||
|  |             # 3. Delete every unmarked item. This is slow, so we spread it over many tasks in | ||||||
|  |             #    small chunks. | ||||||
|  |             group( | ||||||
|  |                 ldap_sync_paginator(source, UserLDAPForwardDeletion) | ||||||
|  |                 + ldap_sync_paginator(source, GroupLDAPForwardDeletion), | ||||||
|  |             ), | ||||||
|         ) |         ) | ||||||
|         task() |         task() | ||||||
|  |  | ||||||
|  | |||||||
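The three-step approach described in the comment above is effectively a mark-and-sweep pass over the source-connection table: mark everything the directory still returns with a fresh UUID, then delete whatever was left unmarked. A toy, in-memory sketch of that idea (plain Python 3.12+; the identifiers, chunk sizes, and the removed entry are invented for illustration — the real sync does the marking and deleting through the ORM, as in the new modules above):

```python
from itertools import batched  # Python 3.12+
from uuid import uuid4

# Stand-in for the source-connection table: identifier -> "validated_by" marker.
connections = {f"user-{i}": None for i in range(12)}
# Step 1: identifiers the directory still returns (pretend user-5 was removed).
ldap_identifiers = (f"user-{i}" for i in range(12) if i != 5)

run_id = uuid4()
# Step 2: mark everything that is still present, streaming in large chunks
# (in the real code this is a cheap bulk .update(validated_by=uuid)).
for chunk in batched(ldap_identifiers, 10_000):
    for identifier in chunk:
        if identifier in connections:
            connections[identifier] = run_id

# Step 3: delete whatever was not marked, in small chunks to spread the load
# (the real code fans this out over many Celery tasks of DELETE_CHUNK_SIZE).
stale = [ident for ident, marker in connections.items() if marker != run_id]
for chunk in batched(stale, 50):
    for identifier in chunk:
        del connections[identifier]

print(sorted(connections))  # user-5 is gone, every other entry survived
```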
| @ -2,6 +2,33 @@ | |||||||
|  |  | ||||||
| from ldap3 import MOCK_SYNC, OFFLINE_SLAPD_2_4, Connection, Server | from ldap3 import MOCK_SYNC, OFFLINE_SLAPD_2_4, Connection, Server | ||||||
|  |  | ||||||
|  | # The mock modifies these dicts in place, so each value is also kept as a separate constant | ||||||
|  | user_in_slapd_dn = "cn=user_in_slapd_cn,ou=users,dc=goauthentik,dc=io" | ||||||
|  | user_in_slapd_cn = "user_in_slapd_cn" | ||||||
|  | user_in_slapd_uid = "user_in_slapd_uid" | ||||||
|  | user_in_slapd_object_class = "person" | ||||||
|  | user_in_slapd = { | ||||||
|  |     "dn": user_in_slapd_dn, | ||||||
|  |     "attributes": { | ||||||
|  |         "cn": user_in_slapd_cn, | ||||||
|  |         "uid": user_in_slapd_uid, | ||||||
|  |         "objectClass": user_in_slapd_object_class, | ||||||
|  |     }, | ||||||
|  | } | ||||||
|  | group_in_slapd_dn = "cn=user_in_slapd_cn,ou=groups,dc=goauthentik,dc=io" | ||||||
|  | group_in_slapd_cn = "group_in_slapd_cn" | ||||||
|  | group_in_slapd_uid = "group_in_slapd_uid" | ||||||
|  | group_in_slapd_object_class = "groupOfNames" | ||||||
|  | group_in_slapd = { | ||||||
|  |     "dn": group_in_slapd_dn, | ||||||
|  |     "attributes": { | ||||||
|  |         "cn": group_in_slapd_cn, | ||||||
|  |         "uid": group_in_slapd_uid, | ||||||
|  |         "objectClass": group_in_slapd_object_class, | ||||||
|  |         "member": [user_in_slapd["dn"]], | ||||||
|  |     }, | ||||||
|  | } | ||||||
|  |  | ||||||
|  |  | ||||||
| def mock_slapd_connection(password: str) -> Connection: | def mock_slapd_connection(password: str) -> Connection: | ||||||
|     """Create mock SLAPD connection""" |     """Create mock SLAPD connection""" | ||||||
| @ -96,5 +123,14 @@ def mock_slapd_connection(password: str) -> Connection: | |||||||
|             "objectClass": "posixAccount", |             "objectClass": "posixAccount", | ||||||
|         }, |         }, | ||||||
|     ) |     ) | ||||||
|  |     # Known user and group | ||||||
|  |     connection.strategy.add_entry( | ||||||
|  |         user_in_slapd["dn"], | ||||||
|  |         user_in_slapd["attributes"], | ||||||
|  |     ) | ||||||
|  |     connection.strategy.add_entry( | ||||||
|  |         group_in_slapd["dn"], | ||||||
|  |         group_in_slapd["attributes"], | ||||||
|  |     ) | ||||||
|     connection.bind() |     connection.bind() | ||||||
|     return connection |     return connection | ||||||
|  | |||||||
| @ -13,14 +13,26 @@ from authentik.events.system_tasks import TaskStatus | |||||||
| from authentik.lib.generators import generate_id, generate_key | from authentik.lib.generators import generate_id, generate_key | ||||||
| from authentik.lib.sync.outgoing.exceptions import StopSync | from authentik.lib.sync.outgoing.exceptions import StopSync | ||||||
| from authentik.lib.utils.reflection import class_to_path | from authentik.lib.utils.reflection import class_to_path | ||||||
| from authentik.sources.ldap.models import LDAPSource, LDAPSourcePropertyMapping | from authentik.sources.ldap.models import ( | ||||||
|  |     GroupLDAPSourceConnection, | ||||||
|  |     LDAPSource, | ||||||
|  |     LDAPSourcePropertyMapping, | ||||||
|  |     UserLDAPSourceConnection, | ||||||
|  | ) | ||||||
|  | from authentik.sources.ldap.sync.forward_delete_users import DELETE_CHUNK_SIZE | ||||||
| from authentik.sources.ldap.sync.groups import GroupLDAPSynchronizer | from authentik.sources.ldap.sync.groups import GroupLDAPSynchronizer | ||||||
| from authentik.sources.ldap.sync.membership import MembershipLDAPSynchronizer | from authentik.sources.ldap.sync.membership import MembershipLDAPSynchronizer | ||||||
| from authentik.sources.ldap.sync.users import UserLDAPSynchronizer | from authentik.sources.ldap.sync.users import UserLDAPSynchronizer | ||||||
| from authentik.sources.ldap.tasks import ldap_sync, ldap_sync_all | from authentik.sources.ldap.tasks import ldap_sync, ldap_sync_all | ||||||
| from authentik.sources.ldap.tests.mock_ad import mock_ad_connection | from authentik.sources.ldap.tests.mock_ad import mock_ad_connection | ||||||
| from authentik.sources.ldap.tests.mock_freeipa import mock_freeipa_connection | from authentik.sources.ldap.tests.mock_freeipa import mock_freeipa_connection | ||||||
| from authentik.sources.ldap.tests.mock_slapd import mock_slapd_connection | from authentik.sources.ldap.tests.mock_slapd import ( | ||||||
|  |     group_in_slapd_cn, | ||||||
|  |     group_in_slapd_uid, | ||||||
|  |     mock_slapd_connection, | ||||||
|  |     user_in_slapd_cn, | ||||||
|  |     user_in_slapd_uid, | ||||||
|  | ) | ||||||
|  |  | ||||||
| LDAP_PASSWORD = generate_key() | LDAP_PASSWORD = generate_key() | ||||||
|  |  | ||||||
| @ -257,11 +269,55 @@ class LDAPSyncTests(TestCase): | |||||||
|         self.source.group_membership_field = "memberUid" |         self.source.group_membership_field = "memberUid" | ||||||
|         self.source.user_object_filter = "(objectClass=posixAccount)" |         self.source.user_object_filter = "(objectClass=posixAccount)" | ||||||
|         self.source.group_object_filter = "(objectClass=posixGroup)" |         self.source.group_object_filter = "(objectClass=posixGroup)" | ||||||
|  |         self.source.user_membership_attribute = "uid" | ||||||
|         self.source.user_property_mappings.set( |         self.source.user_property_mappings.set( | ||||||
|             LDAPSourcePropertyMapping.objects.filter( |             [ | ||||||
|  |                 *LDAPSourcePropertyMapping.objects.filter( | ||||||
|                     Q(managed__startswith="goauthentik.io/sources/ldap/default") |                     Q(managed__startswith="goauthentik.io/sources/ldap/default") | ||||||
|                     | Q(managed__startswith="goauthentik.io/sources/ldap/openldap") |                     | Q(managed__startswith="goauthentik.io/sources/ldap/openldap") | ||||||
|  |                 ).all(), | ||||||
|  |                 LDAPSourcePropertyMapping.objects.create( | ||||||
|  |                     name="name", | ||||||
|  |                     expression='return {"attributes": {"uid": list_flatten(ldap.get("uid"))}}', | ||||||
|  |                 ), | ||||||
|  |             ] | ||||||
|         ) |         ) | ||||||
|  |         self.source.group_property_mappings.set( | ||||||
|  |             LDAPSourcePropertyMapping.objects.filter( | ||||||
|  |                 managed="goauthentik.io/sources/ldap/openldap-cn" | ||||||
|  |             ) | ||||||
|  |         ) | ||||||
|  |         connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD)) | ||||||
|  |         with patch("authentik.sources.ldap.models.LDAPSource.connection", connection): | ||||||
|  |             self.source.save() | ||||||
|  |             user_sync = UserLDAPSynchronizer(self.source) | ||||||
|  |             user_sync.sync_full() | ||||||
|  |             group_sync = GroupLDAPSynchronizer(self.source) | ||||||
|  |             group_sync.sync_full() | ||||||
|  |             membership_sync = MembershipLDAPSynchronizer(self.source) | ||||||
|  |             membership_sync.sync_full() | ||||||
|  |             # Test if membership mapping based on memberUid works. | ||||||
|  |             posix_group = Group.objects.filter(name="group-posix").first() | ||||||
|  |             self.assertTrue(posix_group.users.filter(name="user-posix").exists()) | ||||||
|  |  | ||||||
|  |     def test_sync_groups_openldap_posix_group_nonstandard_membership_attribute(self): | ||||||
|  |         """Test posix group sync with a non-standard user membership attribute""" | ||||||
|  |         self.source.object_uniqueness_field = "cn" | ||||||
|  |         self.source.group_membership_field = "memberUid" | ||||||
|  |         self.source.user_object_filter = "(objectClass=posixAccount)" | ||||||
|  |         self.source.group_object_filter = "(objectClass=posixGroup)" | ||||||
|  |         self.source.user_membership_attribute = "cn" | ||||||
|  |         self.source.user_property_mappings.set( | ||||||
|  |             [ | ||||||
|  |                 *LDAPSourcePropertyMapping.objects.filter( | ||||||
|  |                     Q(managed__startswith="goauthentik.io/sources/ldap/default") | ||||||
|  |                     | Q(managed__startswith="goauthentik.io/sources/ldap/openldap") | ||||||
|  |                 ).all(), | ||||||
|  |                 LDAPSourcePropertyMapping.objects.create( | ||||||
|  |                     name="name", | ||||||
|  |                     expression='return {"attributes": {"cn": list_flatten(ldap.get("cn"))}}', | ||||||
|  |                 ), | ||||||
|  |             ] | ||||||
|         ) |         ) | ||||||
|         self.source.group_property_mappings.set( |         self.source.group_property_mappings.set( | ||||||
|             LDAPSourcePropertyMapping.objects.filter( |             LDAPSourcePropertyMapping.objects.filter( | ||||||
| @ -308,3 +364,160 @@ class LDAPSyncTests(TestCase): | |||||||
|         connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD)) |         connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD)) | ||||||
|         with patch("authentik.sources.ldap.models.LDAPSource.connection", connection): |         with patch("authentik.sources.ldap.models.LDAPSource.connection", connection): | ||||||
|             ldap_sync_all.delay().get() |             ldap_sync_all.delay().get() | ||||||
|  |  | ||||||
|  |     def test_user_deletion(self): | ||||||
|  |         """Test user deletion""" | ||||||
|  |         user = User.objects.create_user(username="not-in-the-source") | ||||||
|  |         UserLDAPSourceConnection.objects.create( | ||||||
|  |             user=user, source=self.source, identifier="not-in-the-source" | ||||||
|  |         ) | ||||||
|  |         self.source.object_uniqueness_field = "uid" | ||||||
|  |         self.source.group_object_filter = "(objectClass=groupOfNames)" | ||||||
|  |         self.source.delete_not_found_objects = True | ||||||
|  |         self.source.save() | ||||||
|  |  | ||||||
|  |         connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD)) | ||||||
|  |         with patch("authentik.sources.ldap.models.LDAPSource.connection", connection): | ||||||
|  |             ldap_sync_all.delay().get() | ||||||
|  |         self.assertFalse(User.objects.filter(username="not-in-the-source").exists()) | ||||||
|  |  | ||||||
|  |     def test_user_deletion_still_in_source(self): | ||||||
|  |         """Test that user is not deleted if it's still in the source""" | ||||||
|  |         username = user_in_slapd_cn | ||||||
|  |         identifier = user_in_slapd_uid | ||||||
|  |         user = User.objects.create_user(username=username) | ||||||
|  |         UserLDAPSourceConnection.objects.create( | ||||||
|  |             user=user, source=self.source, identifier=identifier | ||||||
|  |         ) | ||||||
|  |         self.source.object_uniqueness_field = "uid" | ||||||
|  |         self.source.group_object_filter = "(objectClass=groupOfNames)" | ||||||
|  |         self.source.delete_not_found_objects = True | ||||||
|  |         self.source.save() | ||||||
|  |  | ||||||
|  |         connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD)) | ||||||
|  |         with patch("authentik.sources.ldap.models.LDAPSource.connection", connection): | ||||||
|  |             ldap_sync_all.delay().get() | ||||||
|  |         self.assertTrue(User.objects.filter(username=username).exists()) | ||||||
|  |  | ||||||
|  |     def test_user_deletion_no_sync(self): | ||||||
|  |         """Test that user is not deleted if sync_users is False""" | ||||||
|  |         user = User.objects.create_user(username="not-in-the-source") | ||||||
|  |         UserLDAPSourceConnection.objects.create( | ||||||
|  |             user=user, source=self.source, identifier="not-in-the-source" | ||||||
|  |         ) | ||||||
|  |         self.source.object_uniqueness_field = "uid" | ||||||
|  |         self.source.group_object_filter = "(objectClass=groupOfNames)" | ||||||
|  |         self.source.delete_not_found_objects = True | ||||||
|  |         self.source.sync_users = False | ||||||
|  |         self.source.save() | ||||||
|  |  | ||||||
|  |         connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD)) | ||||||
|  |         with patch("authentik.sources.ldap.models.LDAPSource.connection", connection): | ||||||
|  |             ldap_sync_all.delay().get() | ||||||
|  |         self.assertTrue(User.objects.filter(username="not-in-the-source").exists()) | ||||||
|  |  | ||||||
|  |     def test_user_deletion_no_delete(self): | ||||||
|  |         """Test that user is not deleted if delete_not_found_objects is False""" | ||||||
|  |         user = User.objects.create_user(username="not-in-the-source") | ||||||
|  |         UserLDAPSourceConnection.objects.create( | ||||||
|  |             user=user, source=self.source, identifier="not-in-the-source" | ||||||
|  |         ) | ||||||
|  |         self.source.object_uniqueness_field = "uid" | ||||||
|  |         self.source.group_object_filter = "(objectClass=groupOfNames)" | ||||||
|  |         self.source.save() | ||||||
|  |  | ||||||
|  |         connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD)) | ||||||
|  |         with patch("authentik.sources.ldap.models.LDAPSource.connection", connection): | ||||||
|  |             ldap_sync_all.delay().get() | ||||||
|  |         self.assertTrue(User.objects.filter(username="not-in-the-source").exists()) | ||||||
|  |  | ||||||
|  |     def test_group_deletion(self): | ||||||
|  |         """Test group deletion""" | ||||||
|  |         group = Group.objects.create(name="not-in-the-source") | ||||||
|  |         GroupLDAPSourceConnection.objects.create( | ||||||
|  |             group=group, source=self.source, identifier="not-in-the-source" | ||||||
|  |         ) | ||||||
|  |         self.source.object_uniqueness_field = "uid" | ||||||
|  |         self.source.group_object_filter = "(objectClass=groupOfNames)" | ||||||
|  |         self.source.delete_not_found_objects = True | ||||||
|  |         self.source.save() | ||||||
|  |  | ||||||
|  |         connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD)) | ||||||
|  |         with patch("authentik.sources.ldap.models.LDAPSource.connection", connection): | ||||||
|  |             ldap_sync_all.delay().get() | ||||||
|  |         self.assertFalse(Group.objects.filter(name="not-in-the-source").exists()) | ||||||
|  |  | ||||||
|  |     def test_group_deletion_still_in_source(self): | ||||||
|  |         """Test that group is not deleted if it's still in the source""" | ||||||
|  |         groupname = group_in_slapd_cn | ||||||
|  |         identifier = group_in_slapd_uid | ||||||
|  |         group = Group.objects.create(name=groupname) | ||||||
|  |         GroupLDAPSourceConnection.objects.create( | ||||||
|  |             group=group, source=self.source, identifier=identifier | ||||||
|  |         ) | ||||||
|  |         self.source.object_uniqueness_field = "uid" | ||||||
|  |         self.source.group_object_filter = "(objectClass=groupOfNames)" | ||||||
|  |         self.source.delete_not_found_objects = True | ||||||
|  |         self.source.save() | ||||||
|  |  | ||||||
|  |         connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD)) | ||||||
|  |         with patch("authentik.sources.ldap.models.LDAPSource.connection", connection): | ||||||
|  |             ldap_sync_all.delay().get() | ||||||
|  |         self.assertTrue(Group.objects.filter(name=groupname).exists()) | ||||||
|  |  | ||||||
|  |     def test_group_deletion_no_sync(self): | ||||||
|  |         """Test that group is not deleted if sync_groups is False""" | ||||||
|  |         group = Group.objects.create(name="not-in-the-source") | ||||||
|  |         GroupLDAPSourceConnection.objects.create( | ||||||
|  |             group=group, source=self.source, identifier="not-in-the-source" | ||||||
|  |         ) | ||||||
|  |         self.source.object_uniqueness_field = "uid" | ||||||
|  |         self.source.group_object_filter = "(objectClass=groupOfNames)" | ||||||
|  |         self.source.delete_not_found_objects = True | ||||||
|  |         self.source.sync_groups = False | ||||||
|  |         self.source.save() | ||||||
|  |  | ||||||
|  |         connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD)) | ||||||
|  |         with patch("authentik.sources.ldap.models.LDAPSource.connection", connection): | ||||||
|  |             ldap_sync_all.delay().get() | ||||||
|  |         self.assertTrue(Group.objects.filter(name="not-in-the-source").exists()) | ||||||
|  |  | ||||||
|  |     def test_group_deletion_no_delete(self): | ||||||
|  |         """Test that group is not deleted if delete_not_found_objects is False""" | ||||||
|  |         group = Group.objects.create(name="not-in-the-source") | ||||||
|  |         GroupLDAPSourceConnection.objects.create( | ||||||
|  |             group=group, source=self.source, identifier="not-in-the-source" | ||||||
|  |         ) | ||||||
|  |         self.source.object_uniqueness_field = "uid" | ||||||
|  |         self.source.group_object_filter = "(objectClass=groupOfNames)" | ||||||
|  |         self.source.save() | ||||||
|  |  | ||||||
|  |         connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD)) | ||||||
|  |         with patch("authentik.sources.ldap.models.LDAPSource.connection", connection): | ||||||
|  |             ldap_sync_all.delay().get() | ||||||
|  |         self.assertTrue(Group.objects.filter(name="not-in-the-source").exists()) | ||||||
|  |  | ||||||
|  |     def test_batch_deletion(self): | ||||||
|  |         """Test batch deletion""" | ||||||
|  |         BATCH_SIZE = DELETE_CHUNK_SIZE + 1 | ||||||
|  |         for i in range(BATCH_SIZE): | ||||||
|  |             user = User.objects.create_user(username=f"not-in-the-source-{i}") | ||||||
|  |             group = Group.objects.create(name=f"not-in-the-source-{i}") | ||||||
|  |             group.users.add(user) | ||||||
|  |             UserLDAPSourceConnection.objects.create( | ||||||
|  |                 user=user, source=self.source, identifier=f"not-in-the-source-{i}-user" | ||||||
|  |             ) | ||||||
|  |             GroupLDAPSourceConnection.objects.create( | ||||||
|  |                 group=group, source=self.source, identifier=f"not-in-the-source-{i}-group" | ||||||
|  |             ) | ||||||
|  |         self.source.object_uniqueness_field = "uid" | ||||||
|  |         self.source.group_object_filter = "(objectClass=groupOfNames)" | ||||||
|  |         self.source.delete_not_found_objects = True | ||||||
|  |         self.source.save() | ||||||
|  |  | ||||||
|  |         connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD)) | ||||||
|  |         with patch("authentik.sources.ldap.models.LDAPSource.connection", connection): | ||||||
|  |             ldap_sync_all.delay().get() | ||||||
|  |  | ||||||
|  |         self.assertFalse(User.objects.filter(username__startswith="not-in-the-source").exists()) | ||||||
|  |         self.assertFalse(Group.objects.filter(name__startswith="not-in-the-source").exists()) | ||||||
|  | |||||||
| @ -1,11 +1,12 @@ | |||||||
| """authentik plex config""" | """authentik plex config""" | ||||||
|  |  | ||||||
| from django.apps import AppConfig | from authentik.blueprints.apps import ManagedAppConfig | ||||||
|  |  | ||||||
|  |  | ||||||
| class AuthentikSourcePlexConfig(AppConfig): | class AuthentikSourcePlexConfig(ManagedAppConfig): | ||||||
|     """authentik source plex config""" |     """authentik source plex config""" | ||||||
|  |  | ||||||
|     name = "authentik.sources.plex" |     name = "authentik.sources.plex" | ||||||
|     label = "authentik_sources_plex" |     label = "authentik_sources_plex" | ||||||
|     verbose_name = "authentik Sources.Plex" |     verbose_name = "authentik Sources.Plex" | ||||||
|  |     default = True | ||||||
|  | |||||||
| @ -6,7 +6,7 @@ from django.http.response import Http404 | |||||||
| from requests.exceptions import RequestException | from requests.exceptions import RequestException | ||||||
| from structlog.stdlib import get_logger | from structlog.stdlib import get_logger | ||||||
|  |  | ||||||
| from authentik import __version__ | from authentik import authentik_version | ||||||
| from authentik.core.sources.flow_manager import SourceFlowManager | from authentik.core.sources.flow_manager import SourceFlowManager | ||||||
| from authentik.lib.utils.http import get_http_session | from authentik.lib.utils.http import get_http_session | ||||||
| from authentik.sources.plex.models import PlexSource, UserPlexSourceConnection | from authentik.sources.plex.models import PlexSource, UserPlexSourceConnection | ||||||
| @ -34,7 +34,7 @@ class PlexAuth: | |||||||
|         """Get common headers""" |         """Get common headers""" | ||||||
|         return { |         return { | ||||||
|             "X-Plex-Product": "authentik", |             "X-Plex-Product": "authentik", | ||||||
|             "X-Plex-Version": __version__, |             "X-Plex-Version": authentik_version(), | ||||||
|             "X-Plex-Device-Vendor": "goauthentik.io", |             "X-Plex-Device-Vendor": "goauthentik.io", | ||||||
|         } |         } | ||||||
|  |  | ||||||
|  | |||||||
| @ -9,6 +9,7 @@ from django.http.response import HttpResponseBadRequest | |||||||
| from django.shortcuts import get_object_or_404, redirect | from django.shortcuts import get_object_or_404, redirect | ||||||
| from django.utils.decorators import method_decorator | from django.utils.decorators import method_decorator | ||||||
| from django.utils.http import urlencode | from django.utils.http import urlencode | ||||||
|  | from django.utils.translation import gettext as _ | ||||||
| from django.views import View | from django.views import View | ||||||
| from django.views.decorators.csrf import csrf_exempt | from django.views.decorators.csrf import csrf_exempt | ||||||
| from structlog.stdlib import get_logger | from structlog.stdlib import get_logger | ||||||
| @ -128,7 +129,9 @@ class InitiateView(View): | |||||||
|         # otherwise we default to POST_AUTO, with direct redirect |         # otherwise we default to POST_AUTO, with direct redirect | ||||||
|         if source.binding_type == SAMLBindingTypes.POST: |         if source.binding_type == SAMLBindingTypes.POST: | ||||||
|             injected_stages.append(in_memory_stage(ConsentStageView)) |             injected_stages.append(in_memory_stage(ConsentStageView)) | ||||||
|             plan_kwargs[PLAN_CONTEXT_CONSENT_HEADER] = f"Continue to {source.name}" |             plan_kwargs[PLAN_CONTEXT_CONSENT_HEADER] = _( | ||||||
|  |                 "Continue to {source_name}" | ||||||
|  |             ).format(source_name=source.name) | ||||||
|         injected_stages.append(in_memory_stage(AutosubmitStageView)) |         injected_stages.append(in_memory_stage(AutosubmitStageView)) | ||||||
|         return self.handle_login_flow( |         return self.handle_login_flow( | ||||||
|             source, |             source, | ||||||
|  | |||||||
| @ -1,11 +1,12 @@ | |||||||
| """Authenticator""" | """Authenticator""" | ||||||
|  |  | ||||||
| from django.apps import AppConfig | from authentik.blueprints.apps import ManagedAppConfig | ||||||
|  |  | ||||||
|  |  | ||||||
| class AuthentikStageAuthenticatorConfig(AppConfig): | class AuthentikStageAuthenticatorConfig(ManagedAppConfig): | ||||||
|     """Authenticator App config""" |     """Authenticator App config""" | ||||||
|  |  | ||||||
|     name = "authentik.stages.authenticator" |     name = "authentik.stages.authenticator" | ||||||
|     label = "authentik_stages_authenticator" |     label = "authentik_stages_authenticator" | ||||||
|     verbose_name = "authentik Stages.Authenticator" |     verbose_name = "authentik Stages.Authenticator" | ||||||
|  |     default = True | ||||||
|  | |||||||
| @ -1,11 +1,12 @@ | |||||||
| """SMS""" | """SMS""" | ||||||
|  |  | ||||||
| from django.apps import AppConfig | from authentik.blueprints.apps import ManagedAppConfig | ||||||
|  |  | ||||||
|  |  | ||||||
| class AuthentikStageAuthenticatorSMSConfig(AppConfig): | class AuthentikStageAuthenticatorSMSConfig(ManagedAppConfig): | ||||||
|     """SMS App config""" |     """SMS App config""" | ||||||
|  |  | ||||||
|     name = "authentik.stages.authenticator_sms" |     name = "authentik.stages.authenticator_sms" | ||||||
|     label = "authentik_stages_authenticator_sms" |     label = "authentik_stages_authenticator_sms" | ||||||
|     verbose_name = "authentik Stages.Authenticator.SMS" |     verbose_name = "authentik Stages.Authenticator.SMS" | ||||||
|  |     default = True | ||||||
|  | |||||||
| @ -1,11 +1,12 @@ | |||||||
| """TOTP""" | """TOTP""" | ||||||
|  |  | ||||||
| from django.apps import AppConfig | from authentik.blueprints.apps import ManagedAppConfig | ||||||
|  |  | ||||||
|  |  | ||||||
| class AuthentikStageAuthenticatorTOTPConfig(AppConfig): | class AuthentikStageAuthenticatorTOTPConfig(ManagedAppConfig): | ||||||
|     """TOTP App config""" |     """TOTP App config""" | ||||||
|  |  | ||||||
|     name = "authentik.stages.authenticator_totp" |     name = "authentik.stages.authenticator_totp" | ||||||
|     label = "authentik_stages_authenticator_totp" |     label = "authentik_stages_authenticator_totp" | ||||||
|     verbose_name = "authentik Stages.Authenticator.TOTP" |     verbose_name = "authentik Stages.Authenticator.TOTP" | ||||||
|  |     default = True | ||||||
|  | |||||||
| @ -1,11 +1,12 @@ | |||||||
| """Authenticator Validation Stage""" | """Authenticator Validation Stage""" | ||||||
|  |  | ||||||
| from django.apps import AppConfig | from authentik.blueprints.apps import ManagedAppConfig | ||||||
|  |  | ||||||
|  |  | ||||||
| class AuthentikStageAuthenticatorValidateConfig(AppConfig): | class AuthentikStageAuthenticatorValidateConfig(ManagedAppConfig): | ||||||
|     """Authenticator Validation Stage""" |     """Authenticator Validation Stage""" | ||||||
|  |  | ||||||
|     name = "authentik.stages.authenticator_validate" |     name = "authentik.stages.authenticator_validate" | ||||||
|     label = "authentik_stages_authenticator_validate" |     label = "authentik_stages_authenticator_validate" | ||||||
|     verbose_name = "authentik Stages.Authenticator.Validate" |     verbose_name = "authentik Stages.Authenticator.Validate" | ||||||
|  |     default = True | ||||||
|  | |||||||
| @ -151,9 +151,7 @@ class AuthenticatorValidateStageWebAuthnTests(FlowTestCase): | |||||||
|             webauthn_user_verification=UserVerification.PREFERRED, |             webauthn_user_verification=UserVerification.PREFERRED, | ||||||
|         ) |         ) | ||||||
|         stage.webauthn_allowed_device_types.set( |         stage.webauthn_allowed_device_types.set( | ||||||
|             WebAuthnDeviceType.objects.filter( |             WebAuthnDeviceType.objects.filter(description="YubiKey 5 Series") | ||||||
|                 description="Android Authenticator with SafetyNet Attestation" |  | ||||||
|             ) |  | ||||||
|         ) |         ) | ||||||
|         session = self.client.session |         session = self.client.session | ||||||
|         plan = FlowPlan(flow_pk=flow.pk.hex) |         plan = FlowPlan(flow_pk=flow.pk.hex) | ||||||
| @ -339,9 +337,7 @@ class AuthenticatorValidateStageWebAuthnTests(FlowTestCase): | |||||||
|             device_classes=[DeviceClasses.WEBAUTHN], |             device_classes=[DeviceClasses.WEBAUTHN], | ||||||
|         ) |         ) | ||||||
|         stage.webauthn_allowed_device_types.set( |         stage.webauthn_allowed_device_types.set( | ||||||
|             WebAuthnDeviceType.objects.filter( |             WebAuthnDeviceType.objects.filter(description="YubiKey 5 Series") | ||||||
|                 description="Android Authenticator with SafetyNet Attestation" |  | ||||||
|             ) |  | ||||||
|         ) |         ) | ||||||
|         session = self.client.session |         session = self.client.session | ||||||
|         plan = FlowPlan(flow_pk=flow.pk.hex) |         plan = FlowPlan(flow_pk=flow.pk.hex) | ||||||
|  | |||||||
File diff suppressed because one or more lines are too long (2 files)
							| @ -141,9 +141,7 @@ class TestAuthenticatorWebAuthnStage(FlowTestCase): | |||||||
|         """Test registration with restricted devices (fail)""" |         """Test registration with restricted devices (fail)""" | ||||||
|         webauthn_mds_import.delay(force=True).get() |         webauthn_mds_import.delay(force=True).get() | ||||||
|         self.stage.device_type_restrictions.set( |         self.stage.device_type_restrictions.set( | ||||||
|             WebAuthnDeviceType.objects.filter( |             WebAuthnDeviceType.objects.filter(description="YubiKey 5 Series") | ||||||
|                 description="Android Authenticator with SafetyNet Attestation" |  | ||||||
|             ) |  | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|         plan = FlowPlan(flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()]) |         plan = FlowPlan(flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()]) | ||||||
|  | |||||||
| @ -1,11 +1,12 @@ | |||||||
| """authentik captcha app""" | """authentik captcha app""" | ||||||
|  |  | ||||||
| from django.apps import AppConfig | from authentik.blueprints.apps import ManagedAppConfig | ||||||
|  |  | ||||||
|  |  | ||||||
| class AuthentikStageCaptchaConfig(AppConfig): | class AuthentikStageCaptchaConfig(ManagedAppConfig): | ||||||
|     """authentik captcha app""" |     """authentik captcha app""" | ||||||
|  |  | ||||||
|     name = "authentik.stages.captcha" |     name = "authentik.stages.captcha" | ||||||
|     label = "authentik_stages_captcha" |     label = "authentik_stages_captcha" | ||||||
|     verbose_name = "authentik Stages.Captcha" |     verbose_name = "authentik Stages.Captcha" | ||||||
|  |     default = True | ||||||
|  | |||||||
| @ -1,11 +1,12 @@ | |||||||
| """authentik consent app""" | """authentik consent app""" | ||||||
|  |  | ||||||
| from django.apps import AppConfig | from authentik.blueprints.apps import ManagedAppConfig | ||||||
|  |  | ||||||
|  |  | ||||||
| class AuthentikStageConsentConfig(AppConfig): | class AuthentikStageConsentConfig(ManagedAppConfig): | ||||||
|     """authentik consent app""" |     """authentik consent app""" | ||||||
|  |  | ||||||
|     name = "authentik.stages.consent" |     name = "authentik.stages.consent" | ||||||
|     label = "authentik_stages_consent" |     label = "authentik_stages_consent" | ||||||
|     verbose_name = "authentik Stages.Consent" |     verbose_name = "authentik Stages.Consent" | ||||||
|  |     default = True | ||||||
|  | |||||||
| @ -4,6 +4,8 @@ from uuid import uuid4 | |||||||
|  |  | ||||||
| from django.http import HttpRequest, HttpResponse | from django.http import HttpRequest, HttpResponse | ||||||
| from django.utils.timezone import now | from django.utils.timezone import now | ||||||
|  | from django.utils.translation import gettext as _ | ||||||
|  | from rest_framework.exceptions import ValidationError | ||||||
| from rest_framework.fields import CharField | from rest_framework.fields import CharField | ||||||
|  |  | ||||||
| from authentik.core.api.utils import PassiveSerializer | from authentik.core.api.utils import PassiveSerializer | ||||||
| @ -47,6 +49,11 @@ class ConsentChallengeResponse(ChallengeResponse): | |||||||
|     component = CharField(default="ak-stage-consent") |     component = CharField(default="ak-stage-consent") | ||||||
|     token = CharField(required=True) |     token = CharField(required=True) | ||||||
|  |  | ||||||
|  |     def validate_token(self, token: str): | ||||||
|  |         if token != self.stage.executor.request.session[SESSION_KEY_CONSENT_TOKEN]: | ||||||
|  |             raise ValidationError(_("Invalid consent token, re-showing prompt")) | ||||||
|  |         return token | ||||||
|  |  | ||||||
|  |  | ||||||
| class ConsentStageView(ChallengeStageView): | class ConsentStageView(ChallengeStageView): | ||||||
|     """Simple consent checker.""" |     """Simple consent checker.""" | ||||||
| @ -120,9 +127,6 @@ class ConsentStageView(ChallengeStageView): | |||||||
|         return super().get(request, *args, **kwargs) |         return super().get(request, *args, **kwargs) | ||||||
|  |  | ||||||
|     def challenge_valid(self, response: ChallengeResponse) -> HttpResponse: |     def challenge_valid(self, response: ChallengeResponse) -> HttpResponse: | ||||||
|         if response.data["token"] != self.request.session[SESSION_KEY_CONSENT_TOKEN]: |  | ||||||
|             self.logger.info("Invalid consent token, re-showing prompt") |  | ||||||
|             return self.get(self.request) |  | ||||||
|         if self.should_always_prompt(): |         if self.should_always_prompt(): | ||||||
|             return self.executor.stage_ok() |             return self.executor.stage_ok() | ||||||
|         current_stage: ConsentStage = self.executor.current_stage |         current_stage: ConsentStage = self.executor.current_stage | ||||||
|  | |||||||
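
The change above moves the consent-token check out of challenge_valid and into a DRF field-level validator (validate_token), so a mismatched token now surfaces as a per-field error on the challenge response instead of silently re-rendering the prompt. A minimal standalone sketch of that pattern, not authentik code, with the session lookup simplified to a constant:

    from rest_framework import serializers

    EXPECTED_TOKEN = "expected"  # stand-in for the value kept in the flow session


    class ConsentResponseSketch(serializers.Serializer):
        """Sketch of a challenge response with a field-level token validator."""

        token = serializers.CharField(required=True)

        def validate_token(self, token: str) -> str:
            # DRF calls validate_<field>() automatically during is_valid()
            if token != EXPECTED_TOKEN:
                raise serializers.ValidationError("Invalid consent token, re-showing prompt")
            return token


    response = ConsentResponseSketch(data={"token": "wrong"})
    print(response.is_valid())  # False
    print(response.errors)      # {'token': ['Invalid consent token, re-showing prompt']}

The test_mismatched_token test added below asserts exactly this error shape on the token field.
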
| @ -17,6 +17,7 @@ from authentik.flows.views.executor import SESSION_KEY_PLAN | |||||||
| from authentik.lib.generators import generate_id | from authentik.lib.generators import generate_id | ||||||
| from authentik.stages.consent.models import ConsentMode, ConsentStage, UserConsent | from authentik.stages.consent.models import ConsentMode, ConsentStage, UserConsent | ||||||
| from authentik.stages.consent.stage import ( | from authentik.stages.consent.stage import ( | ||||||
|  |     PLAN_CONTEXT_CONSENT_HEADER, | ||||||
|     PLAN_CONTEXT_CONSENT_PERMISSIONS, |     PLAN_CONTEXT_CONSENT_PERMISSIONS, | ||||||
|     SESSION_KEY_CONSENT_TOKEN, |     SESSION_KEY_CONSENT_TOKEN, | ||||||
| ) | ) | ||||||
| @ -33,6 +34,40 @@ class TestConsentStage(FlowTestCase): | |||||||
|             slug=generate_id(), |             slug=generate_id(), | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|  |     def test_mismatched_token(self): | ||||||
|  |         """Test incorrect token""" | ||||||
|  |         flow = create_test_flow(FlowDesignation.AUTHENTICATION) | ||||||
|  |         stage = ConsentStage.objects.create(name=generate_id(), mode=ConsentMode.ALWAYS_REQUIRE) | ||||||
|  |         binding = FlowStageBinding.objects.create(target=flow, stage=stage, order=2) | ||||||
|  |  | ||||||
|  |         plan = FlowPlan(flow_pk=flow.pk.hex, bindings=[binding], markers=[StageMarker()]) | ||||||
|  |         session = self.client.session | ||||||
|  |         session[SESSION_KEY_PLAN] = plan | ||||||
|  |         session.save() | ||||||
|  |         response = self.client.get( | ||||||
|  |             reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}), | ||||||
|  |         ) | ||||||
|  |         self.assertEqual(response.status_code, 200) | ||||||
|  |  | ||||||
|  |         session = self.client.session | ||||||
|  |         response = self.client.post( | ||||||
|  |             reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}), | ||||||
|  |             { | ||||||
|  |                 "token": generate_id(), | ||||||
|  |             }, | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |         self.assertEqual(response.status_code, 200) | ||||||
|  |         self.assertStageResponse( | ||||||
|  |             response, | ||||||
|  |             flow, | ||||||
|  |             component="ak-stage-consent", | ||||||
|  |             response_errors={ | ||||||
|  |                 "token": [{"string": "Invalid consent token, re-showing prompt", "code": "invalid"}] | ||||||
|  |             }, | ||||||
|  |         ) | ||||||
|  |         self.assertFalse(UserConsent.objects.filter(user=self.user).exists()) | ||||||
|  |  | ||||||
|     def test_always_required(self): |     def test_always_required(self): | ||||||
|         """Test always required consent""" |         """Test always required consent""" | ||||||
|         flow = create_test_flow(FlowDesignation.AUTHENTICATION) |         flow = create_test_flow(FlowDesignation.AUTHENTICATION) | ||||||
| @ -158,6 +193,7 @@ class TestConsentStage(FlowTestCase): | |||||||
|             context={ |             context={ | ||||||
|                 PLAN_CONTEXT_APPLICATION: self.application, |                 PLAN_CONTEXT_APPLICATION: self.application, | ||||||
|                 PLAN_CONTEXT_CONSENT_PERMISSIONS: [PermissionDict(id="foo", name="foo-desc")], |                 PLAN_CONTEXT_CONSENT_PERMISSIONS: [PermissionDict(id="foo", name="foo-desc")], | ||||||
|  |                 PLAN_CONTEXT_CONSENT_HEADER: "test header", | ||||||
|             }, |             }, | ||||||
|         ) |         ) | ||||||
|         session = self.client.session |         session = self.client.session | ||||||
|  | |||||||
| @ -1,11 +1,12 @@ | |||||||
| """authentik deny stage app config""" | """authentik deny stage app config""" | ||||||
|  |  | ||||||
| from django.apps import AppConfig | from authentik.blueprints.apps import ManagedAppConfig | ||||||
|  |  | ||||||
|  |  | ||||||
| class AuthentikStageDenyConfig(AppConfig): | class AuthentikStageDenyConfig(ManagedAppConfig): | ||||||
|     """authentik deny stage config""" |     """authentik deny stage config""" | ||||||
|  |  | ||||||
|     name = "authentik.stages.deny" |     name = "authentik.stages.deny" | ||||||
|     label = "authentik_stages_deny" |     label = "authentik_stages_deny" | ||||||
|     verbose_name = "authentik Stages.Deny" |     verbose_name = "authentik Stages.Deny" | ||||||
|  |     default = True | ||||||
|  | |||||||
| @ -1,11 +1,12 @@ | |||||||
| """authentik dummy stage config""" | """authentik dummy stage config""" | ||||||
|  |  | ||||||
| from django.apps import AppConfig | from authentik.blueprints.apps import ManagedAppConfig | ||||||
|  |  | ||||||
|  |  | ||||||
| class AuthentikStageDummyConfig(AppConfig): | class AuthentikStageDummyConfig(ManagedAppConfig): | ||||||
|     """authentik dummy stage config""" |     """authentik dummy stage config""" | ||||||
|  |  | ||||||
|     name = "authentik.stages.dummy" |     name = "authentik.stages.dummy" | ||||||
|     label = "authentik_stages_dummy" |     label = "authentik_stages_dummy" | ||||||
|     verbose_name = "authentik Stages.Dummy" |     verbose_name = "authentik Stages.Dummy" | ||||||
|  |     default = True | ||||||
|  | |||||||
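
The captcha, consent, deny, and dummy stage apps above all receive the same mechanical change: their config classes switch from django.apps.AppConfig to authentik's ManagedAppConfig and set default = True. For reference, a minimal sketch of the resulting shape; the app name here is hypothetical, not a real authentik package:

    from authentik.blueprints.apps import ManagedAppConfig


    class AuthentikStageExampleConfig(ManagedAppConfig):
        """authentik example stage config (hypothetical)"""

        # Assumption: ManagedAppConfig layers authentik's managed/blueprint
        # startup hooks on top of the plain Django AppConfig.
        name = "authentik.stages.example"
        label = "authentik_stages_example"
        verbose_name = "authentik Stages.Example"
        # default = True tells Django to use this class as the app's default configuration
        default = True
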
							
								
								
									
38  authentik/stages/email/flow.py  Normal file
							| @ -0,0 +1,38 @@ | |||||||
|  | from base64 import b64encode | ||||||
|  | from copy import deepcopy | ||||||
|  | from pickle import dumps  # nosec | ||||||
|  |  | ||||||
|  | from django.utils.translation import gettext as _ | ||||||
|  |  | ||||||
|  | from authentik.flows.models import FlowToken, in_memory_stage | ||||||
|  | from authentik.flows.planner import PLAN_CONTEXT_IS_RESTORED, FlowPlan | ||||||
|  | from authentik.stages.consent.stage import PLAN_CONTEXT_CONSENT_HEADER, ConsentStageView | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def pickle_flow_token_for_email(plan: FlowPlan): | ||||||
|  |     """Insert a consent stage into the flow plan and pickle it for a FlowToken, | ||||||
|  |     to be sent via Email. This is to prevent automated email scanners, which sometimes | ||||||
|  |     open links in emails in a full browser, from breaking the link.""" | ||||||
|  |     plan_copy = deepcopy(plan) | ||||||
|  |     plan_copy.insert_stage(in_memory_stage(EmailTokenRevocationConsentStageView), index=0) | ||||||
|  |     plan_copy.context[PLAN_CONTEXT_CONSENT_HEADER] = _("Continue to confirm this email address.") | ||||||
|  |     data = dumps(plan_copy) | ||||||
|  |     return b64encode(data).decode() | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class EmailTokenRevocationConsentStageView(ConsentStageView): | ||||||
|  |  | ||||||
|  |     def get(self, request, *args, **kwargs): | ||||||
|  |         token: FlowToken = self.executor.plan.context[PLAN_CONTEXT_IS_RESTORED] | ||||||
|  |         try: | ||||||
|  |             token.refresh_from_db() | ||||||
|  |         except FlowToken.DoesNotExist: | ||||||
|  |             return self.executor.stage_invalid( | ||||||
|  |                 _("Link was already used, please request a new link.") | ||||||
|  |             ) | ||||||
|  |         return super().get(request, *args, **kwargs) | ||||||
|  |  | ||||||
|  |     def challenge_valid(self, response): | ||||||
|  |         token: FlowToken = self.executor.plan.context[PLAN_CONTEXT_IS_RESTORED] | ||||||
|  |         token.delete() | ||||||
|  |         return super().challenge_valid(response) | ||||||
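
The new pickle_flow_token_for_email helper stores a base64-encoded pickle of a copy of the flow plan, with a consent stage inserted at index 0, so that an automated mail scanner that merely opens the link still stops at a consent prompt instead of consuming the flow. For orientation, a rough sketch of the inverse step, i.e. turning such a payload back into the pickled plan when the token is redeemed; this is an illustrative assumption, not the actual FlowToken implementation:

    from base64 import b64decode
    from pickle import loads  # nosec


    def restore_plan_from_token(encoded_plan: str):
        """Decode the base64 payload produced by pickle_flow_token_for_email
        back into the pickled FlowPlan object (illustrative sketch only)."""
        return loads(b64decode(encoded_plan))  # nosec

Together with revoke_on_execution=False in the email stage diff that follows, the token survives the first automated open and is only deleted once EmailTokenRevocationConsentStageView.challenge_valid runs, i.e. after the user actually confirms.
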
| @ -23,6 +23,7 @@ from authentik.flows.stage import ChallengeStageView | |||||||
| from authentik.flows.views.executor import QS_KEY_TOKEN, QS_QUERY | from authentik.flows.views.executor import QS_KEY_TOKEN, QS_QUERY | ||||||
| from authentik.lib.utils.errors import exception_to_string | from authentik.lib.utils.errors import exception_to_string | ||||||
| from authentik.lib.utils.time import timedelta_from_string | from authentik.lib.utils.time import timedelta_from_string | ||||||
|  | from authentik.stages.email.flow import pickle_flow_token_for_email | ||||||
| from authentik.stages.email.models import EmailStage | from authentik.stages.email.models import EmailStage | ||||||
| from authentik.stages.email.tasks import send_mails | from authentik.stages.email.tasks import send_mails | ||||||
| from authentik.stages.email.utils import TemplateEmailMessage | from authentik.stages.email.utils import TemplateEmailMessage | ||||||
| @ -86,7 +87,8 @@ class EmailStageView(ChallengeStageView): | |||||||
|                 user=pending_user, |                 user=pending_user, | ||||||
|                 identifier=identifier, |                 identifier=identifier, | ||||||
|                 flow=self.executor.flow, |                 flow=self.executor.flow, | ||||||
|                 _plan=FlowToken.pickle(self.executor.plan), |                 _plan=pickle_flow_token_for_email(self.executor.plan), | ||||||
|  |                 revoke_on_execution=False, | ||||||
|             ) |             ) | ||||||
|         token = tokens.first() |         token = tokens.first() | ||||||
|         # Check if token is expired and rotate key if so |         # Check if token is expired and rotate key if so | ||||||
|  | |||||||
| @ -174,5 +174,5 @@ class TestEmailStageSending(FlowTestCase): | |||||||
|                 response = self.client.post(url) |                 response = self.client.post(url) | ||||||
|             response = self.client.post(url) |             response = self.client.post(url) | ||||||
|             self.assertEqual(response.status_code, 200) |             self.assertEqual(response.status_code, 200) | ||||||
|             self.assertTrue(len(mail.outbox) >= 1) |             self.assertGreaterEqual(len(mail.outbox), 1) | ||||||
|             self.assertEqual(mail.outbox[0].subject, "authentik") |             self.assertEqual(mail.outbox[0].subject, "authentik") | ||||||
|  | |||||||
Some files were not shown because too many files have changed in this diff.